diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 4f470b9f..765249a6 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -3,7 +3,7 @@ default_language_version:
 exclude: "^src/atomate2/vasp/schemas/calc_types/"
 repos:
   - repo: https://github.com/charliermarsh/ruff-pre-commit
-    rev: v0.4.2
+    rev: v0.6.8
     hooks:
       - id: ruff
         args: [--fix]
@@ -17,7 +17,7 @@ repos:
       - id: end-of-file-fixer
       - id: trailing-whitespace
   - repo: https://github.com/asottile/blacken-docs
-    rev: 1.16.0
+    rev: 1.18.0
     hooks:
       - id: blacken-docs
         additional_dependencies: [black]
@@ -43,7 +43,7 @@
       - id: rst-directive-colons
       - id: rst-inline-touching-normal
   - repo: https://github.com/pre-commit/mirrors-mypy
-    rev: v1.10.0
+    rev: v1.11.2
     hooks:
       - id: mypy
         files: ^src/
@@ -52,7 +52,7 @@
           - types-pkg_resources==0.1.2
           - types-paramiko
   - repo: https://github.com/codespell-project/codespell
-    rev: v2.2.6
+    rev: v2.3.0
     hooks:
       - id: codespell
         stages: [commit, commit-msg]
diff --git a/docs/tutorials/4-creating-flows.ipynb b/docs/tutorials/4-creating-flows.ipynb
index eba524b4..50c2ab91 100644
--- a/docs/tutorials/4-creating-flows.ipynb
+++ b/docs/tutorials/4-creating-flows.ipynb
@@ -51,10 +51,12 @@
    "source": [
     "from jobflow import job\n",
     "\n",
+    "\n",
     "@job\n",
     "def add(a, b, c=2):\n",
     "    return a + b + c\n",
     "\n",
+    "\n",
     "@job\n",
     "def mult(a, b):\n",
     "    return a * b"
diff --git a/docs/tutorials/5-dynamic-flows.ipynb b/docs/tutorials/5-dynamic-flows.ipynb
index 43844076..b97579de 100644
--- a/docs/tutorials/5-dynamic-flows.ipynb
+++ b/docs/tutorials/5-dynamic-flows.ipynb
@@ -42,12 +42,12 @@
    "execution_count": 1,
    "id": "78348798",
    "metadata": {
-    "nbsphinx": "hidden",
-    "tags": [],
     "ExecuteTime": {
      "end_time": "2023-11-23T22:55:01.113171569Z",
      "start_time": "2023-11-23T22:55:01.112486877Z"
-    }
+    },
+    "nbsphinx": "hidden",
+    "tags": []
    },
    "outputs": [],
    "source": [
@@ -98,26 +98,28 @@
    ],
    "source": [
     "from random import randint\n",
-    "from jobflow import job, Flow, Response\n",
+    "\n",
+    "from jobflow import Flow, Response, job\n",
     "from jobflow.managers.local import run_locally\n",
     "\n",
+    "\n",
     "@job\n",
     "def make_list(a):\n",
     "    return [a] * randint(2, 5)\n",
     "\n",
+    "\n",
     "@job\n",
     "def add(a, b):\n",
     "    return a + b\n",
     "\n",
+    "\n",
     "@job\n",
     "def add_distributed(list_a):\n",
-    "    jobs = []\n",
-    "    for val in list_a:\n",
-    "        jobs.append(add(val, 1))\n",
-    "    \n",
+    "    jobs = [add(val, 1) for val in list_a]\n",
     "    flow = Flow(jobs)\n",
     "    return Response(replace=flow)\n",
     "\n",
+    "\n",
     "job1 = make_list(2)\n",
     "job2 = add_distributed(job1.output)\n",
     "flow = Flow([job1, job2])\n",
@@ -215,14 +217,17 @@
    ],
    "source": [
     "@job\n",
-    "def add(a, b):\n",
+    "def add(a, b):  # noqa: F811\n",
     "    return a + b\n",
     "\n",
+    "\n",
     "@job\n",
     "def add_with_logic(a, b):\n",
     "    if a < 10:\n",
     "        return Response(addition=add(a, b))\n",
-    "    \n",
+    "    return None\n",
+    "\n",
+    "\n",
     "job1 = add(1, 2)\n",
     "job2 = add_with_logic(job1.output, 2)\n",
     "flow = Flow([job1, job2])\n",
@@ -294,11 +299,14 @@
     "def add(a, b):\n",
     "    return a + b\n",
     "\n",
+    "\n",
     "@job\n",
     "def add_with_logic(a, b):\n",
     "    if a < 10:\n",
     "        return Response(addition=add(a, b))\n",
-    "    \n",
+    "    return None\n",
+    "\n",
+    "\n",
     "job1 = add(1, 20)\n",
     "job2 = add_with_logic(job1.output, 20)\n",
     "flow = Flow([job1, job2])\n",
@@ -341,16 +349,25 @@
   },
   {
    "cell_type": "markdown",
-   "source": [
-    "In this way, one can also compute the Fibonacci numbers:"
-   ],
+   "id": "7fb27b941602401d91542211134fc71a",
    "metadata": {
     "collapsed": false
-   }
+   },
+   "source": [
"In this way, one can also compute the Fibonacci numbers:" + ] }, { "cell_type": "code", "execution_count": 8, + "id": "acae54e37e7d407bbb7b55eff062a284", + "metadata": { + "ExecuteTime": { + "end_time": "2023-11-23T22:55:13.426518952Z", + "start_time": "2023-11-23T22:55:13.322421257Z" + }, + "collapsed": false + }, "outputs": [ { "name": "stdout", @@ -392,13 +409,12 @@ } ], "source": [ - "\"\"\"A dynamic workflow that calculates the Fibonacci sequence.\"\"\"\n", - "from jobflow import Response, job, run_locally\n", + "from jobflow import job, run_locally\n", "\n", "\n", "@job\n", "def fibonacci(smaller, larger, stop_point=1000):\n", - " \"\"\"Calculate the next number in the Fibonacci sequence.\n", + " \"\"\"A dynamic workflow that calculates the Fibonacci sequence.\n", "\n", " If the number is larger than stop_point, the job will stop the workflow\n", " execution, otherwise, a new job will be submitted to calculate the next number.\n", @@ -415,15 +431,8 @@ "fibonacci_job = fibonacci(1, 1)\n", "\n", "# run the job; responses will contain the output from all jobs\n", - "responses = run_locally(fibonacci_job)\n" - ], - "metadata": { - "collapsed": false, - "ExecuteTime": { - "end_time": "2023-11-23T22:55:13.426518952Z", - "start_time": "2023-11-23T22:55:13.322421257Z" - } - } + "responses = run_locally(fibonacci_job)" + ] }, { "cell_type": "markdown", @@ -472,11 +481,14 @@ "def add(a, b):\n", " return a + b\n", "\n", + "\n", "@job\n", "def add_with_logic(a, b):\n", " if a < 10:\n", " return Response(detour=add(a, b))\n", - " \n", + " return None\n", + "\n", + "\n", "job1 = add(1, 2)\n", "job2 = add_with_logic(job1.output, 2)\n", "flow = Flow([job1, job2])\n", @@ -497,7 +509,12 @@ "outputs": [ { "data": { - "text/plain": "{'301d75f0-7042-494a-9f24-cab0428c2fd1': {1: Response(output=3, detour=None, addition=None, replace=None, stored_data=None, stop_children=False, stop_jobflow=False)},\n '97be61a8-eec4-4e64-bf53-ba37621575e7': {1: Response(output=None, detour=Flow(name='Flow', uuid='0de995a5-1110-4200-b010-276cb2017474')\n 1. Job(name='add', uuid='d4c31f68-09ad-418a-ac52-89b303fc2a00'), addition=None, replace=None, stored_data=None, stop_children=False, stop_jobflow=False)},\n 'd4c31f68-09ad-418a-ac52-89b303fc2a00': {1: Response(output=5, detour=None, addition=None, replace=None, stored_data=None, stop_children=False, stop_jobflow=False)}}" + "text/plain": [ + "{'301d75f0-7042-494a-9f24-cab0428c2fd1': {1: Response(output=3, detour=None, addition=None, replace=None, stored_data=None, stop_children=False, stop_jobflow=False)},\n", + " '97be61a8-eec4-4e64-bf53-ba37621575e7': {1: Response(output=None, detour=Flow(name='Flow', uuid='0de995a5-1110-4200-b010-276cb2017474')\n", + " 1. 
+      " 1. Job(name='add', uuid='d4c31f68-09ad-418a-ac52-89b303fc2a00'), addition=None, replace=None, stored_data=None, stop_children=False, stop_jobflow=False)},\n",
+      " 'd4c31f68-09ad-418a-ac52-89b303fc2a00': {1: Response(output=5, detour=None, addition=None, replace=None, stored_data=None, stop_children=False, stop_jobflow=False)}}"
+     ]
     },
     "execution_count": 10,
     "metadata": {},
@@ -519,9 +536,9 @@
 ],
 "metadata": {
  "kernelspec": {
-   "name": "python3",
+   "display_name": "Python 3 (ipykernel)",
    "language": "python",
-   "display_name": "Python 3 (ipykernel)"
+   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
diff --git a/docs/tutorials/6-makers.ipynb b/docs/tutorials/6-makers.ipynb
index 93c25602..1c19a044 100644
--- a/docs/tutorials/6-makers.ipynb
+++ b/docs/tutorials/6-makers.ipynb
@@ -61,9 +61,11 @@
    ],
    "source": [
     "from dataclasses import dataclass\n",
-    "from jobflow import job, Flow, Maker\n",
+    "\n",
+    "from jobflow import Flow, Maker, job\n",
     "from jobflow.managers.local import run_locally\n",
     "\n",
+    "\n",
     "@dataclass\n",
     "class AddMaker(Maker):\n",
     "    name: str = \"Add Maker\"\n",
@@ -73,10 +75,9 @@
     "    def make(self, a, b):\n",
     "        if self.operation == \"add\":\n",
     "            return a + b\n",
-    "        elif self.operation == \"mult\":\n",
+    "        if self.operation == \"mult\":\n",
     "            return a * b\n",
-    "        else:\n",
-    "            raise ValueError(f\"Unknown operation: {self.operation}\")\n",
+    "        raise ValueError(f\"Unknown operation: {self.operation}\")\n",
     "\n",
     "\n",
     "job1 = AddMaker().make(a=2, b=3)\n",
@@ -122,9 +123,11 @@
    "outputs": [],
    "source": [
     "from dataclasses import dataclass\n",
-    "from jobflow import job, Flow, Maker\n",
+    "\n",
+    "from jobflow import Flow, Maker, job\n",
     "from jobflow.managers.local import run_locally\n",
     "\n",
+    "\n",
     "@dataclass\n",
     "class AddMaker(Maker):\n",
     "    name: str = \"Add Maker\"\n",
@@ -134,10 +137,10 @@
     "    def make(self, a, b):\n",
     "        if self.operation == \"add\":\n",
     "            return a + b\n",
-    "        elif self.operation == \"mult\":\n",
+    "        if self.operation == \"mult\":\n",
     "            return a * b\n",
-    "        else:\n",
-    "            raise ValueError(f\"Unknown operation: {self.operation}\")\n",
+    "        raise ValueError(f\"Unknown operation: {self.operation}\")\n",
+    "\n",
     "\n",
     "@dataclass\n",
     "class SubtractMaker(Maker):\n",
@@ -147,6 +150,7 @@
     "    def make(self, a, b):\n",
     "        return b - a\n",
     "\n",
+    "\n",
     "job1 = AddMaker().make(a=2, b=3)\n",
     "job2 = SubtractMaker().make(a=job1.output, b=4)\n",
     "flow = Flow([job1, job2])"
diff --git a/docs/tutorials/7-generalized-makers.ipynb b/docs/tutorials/7-generalized-makers.ipynb
index 4f9112a4..cd9353cc 100644
--- a/docs/tutorials/7-generalized-makers.ipynb
+++ b/docs/tutorials/7-generalized-makers.ipynb
@@ -46,7 +46,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "from jobflow import Maker, job, Flow\n",
+    "from jobflow import Flow, Maker, job\n",
     "from jobflow.managers.local import run_locally"
    ]
   },
@@ -59,7 +59,7 @@
    "source": [
     "class BaseMaker(Maker):\n",
     "    def code_specific_func(self, arg1):\n",
-    "        raise NotImplementedError()\n",
+    "        raise NotImplementedError\n",
     "\n",
     "    def make(self):\n",
     "        return Flow([job1(self.code_specific_func, \"ARG1\")])\n",
diff --git a/pyproject.toml b/pyproject.toml
index 38903d06..ae659d7f 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -53,13 +53,13 @@ fireworks = ["FireWorks"]
 strict = [
     "FireWorks==2.0.3",
     "PyYAML==6.0.2",
-    "maggma==0.69.4",
+    "maggma==0.69.3",
     "matplotlib==3.9.2",
     "monty==2024.7.30",
     "moto==4.2.13",
     "networkx==3.2.1",
     "pydantic-settings==2.5.2",
-    "pydantic==2.9.2",
+    "pydantic==2.9.1",
     "pydash==8.0.3",
"pydot==2.0.0", "python-ulid==2.7.0", @@ -121,6 +121,7 @@ exclude_lines = [ [tool.ruff] target-version = "py39" +output-format = "concise" [tool.ruff.lint] select = [ @@ -171,7 +172,6 @@ ignore = [ ] pydocstyle.convention = "numpy" isort.known-first-party = ["jobflow"] -ignore-init-module-imports = true [tool.ruff.lint.per-file-ignores] # F401: unused import @@ -180,3 +180,4 @@ ignore-init-module-imports = true # PLR2004: magic-value-comparison # PT004: pytest-missing-fixture-name-underscore "**/tests/*" = ["ANN", "ARG001", "D", "PLR2004", "PT004", "S101"] +"docs/tutorials/*" = ["D", "PLR2004"] diff --git a/src/jobflow/core/flow.py b/src/jobflow/core/flow.py index b2f47183..a74ffe4d 100644 --- a/src/jobflow/core/flow.py +++ b/src/jobflow/core/flow.py @@ -190,7 +190,7 @@ def __sub__(self, other: Flow | Job) -> Flow: if other not in self: raise ValueError(f"{other!r} not found in flow") new_flow = deepcopy(self) - new_flow.jobs = tuple([job for job in new_flow if job != other]) + new_flow.jobs = tuple(job for job in new_flow if job != other) return new_flow def __repr__(self, level: int = 0, prefix: str = "") -> str: diff --git a/src/jobflow/core/maker.py b/src/jobflow/core/maker.py index 8344f67a..f45935d3 100644 --- a/src/jobflow/core/maker.py +++ b/src/jobflow/core/maker.py @@ -261,7 +261,7 @@ def recursive_call( if isinstance(class_filter, Maker): # Maker instance supplied rather than a Maker class - class_filter = class_filter.__class__ + class_filter = class_filter.__class__ # type: ignore[assignment] def _filter(nested_obj: Maker): # Filter the Maker object diff --git a/tests/conftest.py b/tests/conftest.py index 083d0b45..6bd17484 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -26,7 +26,7 @@ def mongo_jobstore(database): return store -@pytest.fixture() +@pytest.fixture def memory_jobstore(): from maggma.stores import MemoryStore @@ -38,7 +38,7 @@ def memory_jobstore(): return store -@pytest.fixture() +@pytest.fixture def memory_data_jobstore(): from maggma.stores import MemoryStore @@ -50,7 +50,7 @@ def memory_data_jobstore(): return store -@pytest.fixture() +@pytest.fixture def clean_dir(): import os import shutil @@ -85,7 +85,7 @@ def lpad(database, debug_mode): lpad.db[coll].drop() -@pytest.fixture() +@pytest.fixture def no_pydot(monkeypatch): import builtins @@ -99,7 +99,7 @@ def mocked_import(name, *args, **kwargs): monkeypatch.setattr(builtins, "__import__", mocked_import) -@pytest.fixture() +@pytest.fixture def no_matplotlib(monkeypatch): import builtins diff --git a/tests/core/test_maker.py b/tests/core/test_maker.py index cd4f1979..ac5ecb81 100644 --- a/tests/core/test_maker.py +++ b/tests/core/test_maker.py @@ -116,8 +116,7 @@ def test_update_kwargs(): from jobflow.core.maker import Maker # this is needed to get monty to deserialize them correctly - global AddMaker - global DetourMaker + global AddMaker, DetourMaker @dataclass class AddMaker(Maker): @@ -183,8 +182,7 @@ def make(self, a, b): maker = maker.update_kwargs({"c": 10}, class_filter=AddMaker, nested=False) assert maker.add_maker.c == 5 - global NotAMaker - global FakeDetourMaker + global NotAMaker, FakeDetourMaker @dataclass class NotAMaker(MSONable): @@ -217,8 +215,7 @@ def test_recursive_call(): from jobflow.core.maker import Maker, recursive_call # this is needed to get monty to deserialize them correctly - global AddMaker - global DetourMaker + global AddMaker, DetourMaker @dataclass class AddMaker(Maker): diff --git a/tests/core/test_schemas.py b/tests/core/test_schemas.py index 84af8ace..ef51e6a3 
index 84af8ace..ef51e6a3 100644
--- a/tests/core/test_schemas.py
+++ b/tests/core/test_schemas.py
@@ -3,7 +3,7 @@
 import pytest
 
 
-@pytest.fixture()
+@pytest.fixture
 def sample_data():
     from jobflow.core.schemas import JobStoreDocument
 
diff --git a/tests/core/test_store.py b/tests/core/test_store.py
index 2ddd78a7..5c986623 100644
--- a/tests/core/test_store.py
+++ b/tests/core/test_store.py
@@ -6,7 +6,7 @@
 from jobflow.core.store import JobStore
 
 
-@pytest.fixture()
+@pytest.fixture
 def memory_store():
     from maggma.stores import MemoryStore
 
diff --git a/tests/managers/test_fireworks.py b/tests/managers/test_fireworks.py
index 71ae2c6b..3b6ad495 100644
--- a/tests/managers/test_fireworks.py
+++ b/tests/managers/test_fireworks.py
@@ -13,7 +13,7 @@ def test_flow_to_workflow(
     flow = simple_job()
     wf = flow_to_workflow(flow, memory_jobstore)
 
-    assert type(wf) == Workflow
+    assert type(wf) is Workflow
     assert wf.name == "Flow"
     assert len(wf.fws) == 1
     assert wf.fws[0].name == "func"
@@ -22,7 +22,7 @@
     flow = simple_job()
     wf = flow_to_workflow(flow, name="custom_name")
 
-    assert type(wf) == Workflow
+    assert type(wf) is Workflow
     assert wf.name == "custom_name"
     assert len(wf.fws) == 1
     assert wf.fws[0].name == "func"
@@ -31,7 +31,7 @@
     flow = simple_flow()
     wf = flow_to_workflow(flow, memory_jobstore)
 
-    assert type(wf) == Workflow
+    assert type(wf) is Workflow
     assert wf.name == "Flow"
     assert len(wf.fws) == 1
     assert wf.fws[0].name == "func"
@@ -40,7 +40,7 @@
     flow = connected_flow()
     wf = flow_to_workflow(flow, memory_jobstore)
 
-    assert type(wf) == Workflow
+    assert type(wf) is Workflow
     assert wf.name == "Connected Flow"
     assert len(wf.fws) == 2
     assert wf.fws[0].name == "func"
@@ -81,7 +81,7 @@ def test_job_to_firework(
     job = simple_job()
     fw = job_to_firework(job, memory_jobstore)
 
-    assert type(fw) == Firework
+    assert type(fw) is Firework
     assert fw.name == "func"
 
     job2 = simple_job()
@@ -89,7 +89,7 @@
         job2, memory_jobstore, parents=[job.uuid], parent_mapping={job.uuid: 1}
     )
 
-    assert type(fw) == Firework
+    assert type(fw) is Firework
     assert fw.name == "func"
 
     with pytest.raises(ValueError, match="Both or neither of"):