diff --git a/.ci_support/release.py b/.ci_support/release.py index a3edb01..cb90330 100644 --- a/.ci_support/release.py +++ b/.ci_support/release.py @@ -1,12 +1,19 @@ def get_setup_version_and_pattern(setup_content): depend_lst, version_lst = [], [] for l in setup_content: - if '==' in l: - lst = l.split('[')[-1].split(']')[0].replace(' ', '').replace('"', '').replace("'", '').split(',') + if "==" in l: + lst = ( + l.split("[")[-1] + .split("]")[0] + .replace(" ", "") + .replace('"', "") + .replace("'", "") + .split(",") + ) for dep in lst: - if dep != '\n': - version_lst.append(dep.split('==')[1]) - depend_lst.append(dep.split('==')[0]) + if dep != "\n": + version_lst.append(dep.split("==")[1]) + depend_lst.append(dep.split("==")[0]) version_high_dict = {d: v for d, v in zip(depend_lst, version_lst)} return version_high_dict @@ -16,14 +23,14 @@ def get_env_version(env_content): read_flag = False depend_lst, version_lst = [], [] for l in env_content: - if 'dependencies:' in l: + if "dependencies:" in l: read_flag = True elif read_flag: - lst = l.replace('-', '').replace(' ', '').replace('\n', '').split("=") + lst = l.replace("-", "").replace(" ", "").replace("\n", "").split("=") if len(lst) == 2: depend_lst.append(lst[0]) version_lst.append(lst[1]) - return {d:v for d, v in zip(depend_lst, version_lst)} + return {d: v for d, v in zip(depend_lst, version_lst)} def update_dependencies(setup_content, version_low_dict, version_high_dict): @@ -35,27 +42,29 @@ def update_dependencies(setup_content, version_low_dict, version_high_dict): version_combo_dict[dep] = dep + "==" + ver setup_content_new = "" - pattern_dict = {d:d + "==" + v for d, v in version_high_dict.items()} + pattern_dict = {d: d + "==" + v for d, v in version_high_dict.items()} for l in setup_content: for k, v in pattern_dict.items(): if v in l: l = l.replace(v, version_combo_dict[k]) - setup_content_new +=l + setup_content_new += l return setup_content_new if __name__ == "__main__": - with 
open('pyproject.toml', "r") as f: +    with open("pyproject.toml", "r") as f: setup_content = f.readlines() -    with open('environment.yml', "r") as f: +    with open("environment.yml", "r") as f: env_content = f.readlines() setup_content_new = update_dependencies( setup_content=setup_content[2:], version_low_dict=get_env_version(env_content=env_content), -        version_high_dict=get_setup_version_and_pattern(setup_content=setup_content[2:]), +        version_high_dict=get_setup_version_and_pattern( +            setup_content=setup_content[2:] +        ), ) -    with open('pyproject.toml', "w") as f: +    with open("pyproject.toml", "w") as f: f.writelines("".join(setup_content[:2]) + setup_content_new) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..32b39bb --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,10 @@ +repos: +  - repo: https://github.com/astral-sh/ruff-pre-commit +    rev: v0.4.4 +    hooks: +      - id: ruff +        name: ruff lint +        args: ["--fix"] +        files: ^pysqa/ +      - id: ruff-format +        name: ruff format diff --git a/pysqa/__init__.py b/pysqa/__init__.py index 44f597d..b01eb50 100644 --- a/pysqa/__init__.py +++ b/pysqa/__init__.py @@ -2,4 +2,5 @@ from ._version import get_versions +__all__ = ["QueueAdapter"] __version__ = get_versions()["version"] diff --git a/pysqa/executor/__init__.py b/pysqa/executor/__init__.py index 0209218..3a7a85c 100644 --- a/pysqa/executor/__init__.py +++ b/pysqa/executor/__init__.py @@ -1 +1,3 @@ from pysqa.executor.executor import Executor + +__all__ = ["Executor"] diff --git a/pysqa/wrapper/lsf.py b/pysqa/wrapper/lsf.py index cf04fab..541ed29 100644 --- a/pysqa/wrapper/lsf.py +++ b/pysqa/wrapper/lsf.py @@ -40,8 +40,8 @@ def convert_queue_status(queue_status_output: str) -> pandas.DataFrame: job_id_lst, user_lst, status_lst, job_name_lst = [], [], [], [] line_split_lst = queue_status_output.split("\n") if len(line_split_lst) > 1: -        for l in line_split_lst[1:]: -            line_segments = l.split() +        for line in line_split_lst[1:]: +            
line_segments = line.split() if len(line_segments) > 1: job_id_lst.append(int(line_segments[0])) user_lst.append(line_segments[1]) diff --git a/setup.py b/setup.py index edb08a3..fcbb31f 100644 --- a/setup.py +++ b/setup.py @@ -5,4 +5,4 @@ setup( version=versioneer.get_version(), cmdclass=versioneer.get_cmdclass(), -) \ No newline at end of file +) diff --git a/tests/test_basic.py b/tests/test_basic.py index bd4ca7e..a934ae4 100644 --- a/tests/test_basic.py +++ b/tests/test_basic.py @@ -64,7 +64,7 @@ def test_memory_string_comparison(self): ) self.assertEqual( BasisQueueAdapter._value_in_range( - 90000 * 1024 ** 2, value_min="1K", value_max="70G" + 90000 * 1024**2, value_min="1K", value_max="70G" ), "70G", ) diff --git a/tests/test_cmd.py b/tests/test_cmd.py index 5bdaff2..f084da9 100644 --- a/tests/test_cmd.py +++ b/tests/test_cmd.py @@ -11,12 +11,11 @@ class TestCMD(unittest.TestCase): def setUpClass(cls): cls.test_dir = os.path.abspath(os.path.dirname(__file__)) - @unittest.mock.patch('sys.stdout', new_callable=io.StringIO) - def assert_stdout_command_line(self, cmd_args, execute_command, expected_output, mock_stdout): - command_line( - arguments_lst=cmd_args, - execute_command=execute_command - ) + @unittest.mock.patch("sys.stdout", new_callable=io.StringIO) + def assert_stdout_command_line( + self, cmd_args, execute_command, expected_output, mock_stdout + ): + command_line(arguments_lst=cmd_args, execute_command=execute_command) self.assertEqual(mock_stdout.getvalue(), expected_output) def test_help(self): @@ -45,15 +44,23 @@ def execute_command( self.assert_stdout_command_line( [ - "--config_directory", os.path.join(self.test_dir, "config", "slurm"), + "--config_directory", + os.path.join(self.test_dir, "config", "slurm"), "--submit", - "--queue", "slurm", - "--job_name", "test", - "--working_directory", ".", - "--cores", "2", - "--memory", "1GB", - "--run_time", "10", - "--command", "echo hello" + "--queue", + "slurm", + "--job_name", + "test", + 
"--working_directory", + ".", + "--cores", + "2", + "--memory", + "1GB", + "--run_time", + "10", + "--command", + "echo hello", ], execute_command, "1\n", @@ -61,17 +68,17 @@ def execute_command( with open("run_queue.sh") as f: output = f.readlines() content = [ - '#!/bin/bash\n', - '#SBATCH --output=time.out\n', - '#SBATCH --job-name=test\n', - '#SBATCH --chdir=.\n', - '#SBATCH --get-user-env=L\n', - '#SBATCH --partition=slurm\n', - '#SBATCH --time=4320\n', - '#SBATCH --mem=1GBG\n', - '#SBATCH --cpus-per-task=10\n', - '\n', - 'echo hello' + "#!/bin/bash\n", + "#SBATCH --output=time.out\n", + "#SBATCH --job-name=test\n", + "#SBATCH --chdir=.\n", + "#SBATCH --get-user-env=L\n", + "#SBATCH --partition=slurm\n", + "#SBATCH --time=4320\n", + "#SBATCH --mem=1GBG\n", + "#SBATCH --cpus-per-task=10\n", + "\n", + "echo hello", ] self.assertEqual(output, content) os.remove("run_queue.sh") @@ -88,12 +95,14 @@ def execute_command( self.assert_stdout_command_line( [ - "--config_directory", os.path.join(self.test_dir, "config", "slurm"), + "--config_directory", + os.path.join(self.test_dir, "config", "slurm"), "--delete", - "--id", "1" + "--id", + "1", ], execute_command, - "S\n" + "S\n", ) def test_status(self): @@ -104,27 +113,40 @@ def execute_command( shell=False, error_filename="pysqa.err", ): - with open(os.path.join(self.test_dir, "config", "slurm", "squeue_output")) as f: + with open( + os.path.join(self.test_dir, "config", "slurm", "squeue_output") + ) as f: return f.read() self.assert_stdout_command_line( [ - "--config_directory", os.path.join(self.test_dir, "config", "slurm"), - "--status" + "--config_directory", + os.path.join(self.test_dir, "config", "slurm"), + "--status", ], execute_command, - json.dumps({ - "jobid": [5322019, 5322016, 5322017, 5322018, 5322013], "user": ["janj", "janj", "janj", "janj", "maxi"], - "jobname": ["pi_19576488", "pi_19576485", "pi_19576486", "pi_19576487", "pi_19576482"], - "status": ["running", "running", "running", "running", 
"running"], - "working_directory": [ - "/cmmc/u/janj/pyiron/projects/2023/2023-04-19-dft-test/job_1", - "/cmmc/u/janj/pyiron/projects/2023/2023-04-19-dft-test/job_2", - "/cmmc/u/janj/pyiron/projects/2023/2023-04-19-dft-test/job_3", - "/cmmc/u/janj/pyiron/projects/2023/2023-04-19-dft-test/job_4", - "/cmmc/u/janj/pyiron/projects/2023/2023-04-19-dft-test/job_5" - ] - }) +"\n" + json.dumps( + { + "jobid": [5322019, 5322016, 5322017, 5322018, 5322013], + "user": ["janj", "janj", "janj", "janj", "maxi"], + "jobname": [ + "pi_19576488", + "pi_19576485", + "pi_19576486", + "pi_19576487", + "pi_19576482", + ], + "status": ["running", "running", "running", "running", "running"], + "working_directory": [ + "/cmmc/u/janj/pyiron/projects/2023/2023-04-19-dft-test/job_1", + "/cmmc/u/janj/pyiron/projects/2023/2023-04-19-dft-test/job_2", + "/cmmc/u/janj/pyiron/projects/2023/2023-04-19-dft-test/job_3", + "/cmmc/u/janj/pyiron/projects/2023/2023-04-19-dft-test/job_4", + "/cmmc/u/janj/pyiron/projects/2023/2023-04-19-dft-test/job_5", + ], + } + ) + + "\n", ) def test_list(self): @@ -139,19 +161,31 @@ def execute_command( self.assert_stdout_command_line( [ - "--config_directory", os.path.join(self.test_dir, "config", "slurm"), + "--config_directory", + os.path.join(self.test_dir, "config", "slurm"), "--list", - "--working_directory", os.path.join(self.test_dir, "config", "slurm"), - + "--working_directory", + os.path.join(self.test_dir, "config", "slurm"), ], execute_command, - json.dumps({ - "dirs": [os.path.join(self.test_dir, "config", "slurm")], - "files": sorted([ - os.path.join(self.test_dir, "config", "slurm", "squeue_output"), - os.path.join(self.test_dir, "config", "slurm", "slurm_extra.sh"), - os.path.join(self.test_dir, "config", "slurm", "slurm.sh"), - os.path.join(self.test_dir, "config", "slurm", "queue.yaml"), - ]) - }) + "\n" + json.dumps( + { + "dirs": [os.path.join(self.test_dir, "config", "slurm")], + "files": sorted( + [ + os.path.join( + self.test_dir, "config", 
"slurm", "squeue_output" + ), + os.path.join( + self.test_dir, "config", "slurm", "slurm_extra.sh" + ), + os.path.join(self.test_dir, "config", "slurm", "slurm.sh"), + os.path.join( + self.test_dir, "config", "slurm", "queue.yaml" + ), + ] + ), + } + ) + + "\n", ) diff --git a/tests/test_execute_command.py b/tests/test_execute_command.py index 7b3ac1d..7bcb8d4 100644 --- a/tests/test_execute_command.py +++ b/tests/test_execute_command.py @@ -10,9 +10,9 @@ def test_commands_as_lst(self): working_directory=".", split_output=True, shell=False, - error_filename="pysqa.err" + error_filename="pysqa.err", ) - self.assertEqual(output, ['hello', '']) + self.assertEqual(output, ["hello", ""]) def test_commands_as_lst_no_split(self): output = execute_command( @@ -20,9 +20,9 @@ def test_commands_as_lst_no_split(self): working_directory=".", split_output=False, shell=False, - error_filename="pysqa.err" + error_filename="pysqa.err", ) - self.assertEqual(output, 'hello\n') + self.assertEqual(output, "hello\n") def test_commands_as_lst_shell_true(self): output = execute_command( @@ -30,9 +30,9 @@ def test_commands_as_lst_shell_true(self): working_directory=".", split_output=True, shell=True, - error_filename="pysqa.err" + error_filename="pysqa.err", ) - self.assertEqual(output, ['hello', '']) + self.assertEqual(output, ["hello", ""]) def test_commands_as_str(self): output = execute_command( @@ -40,9 +40,9 @@ def test_commands_as_str(self): working_directory=".", split_output=True, shell=False, - error_filename="pysqa.err" + error_filename="pysqa.err", ) - self.assertEqual(output, ['hello', '']) + self.assertEqual(output, ["hello", ""]) def test_commands_fails(self): output = execute_command( @@ -50,7 +50,7 @@ def test_commands_fails(self): working_directory=".", split_output=True, shell=False, - error_filename="pysqa_fails.err" + error_filename="pysqa_fails.err", ) self.assertIsNone(output) with open("pysqa_fails.err") as f: diff --git a/tests/test_executor.py 
b/tests/test_executor.py index 397e3c0..664772d 100644 --- a/tests/test_executor.py +++ b/tests/test_executor.py @@ -20,13 +20,15 @@ def funct_add(a, b): - return a+b + return a + b -@unittest.skipIf(os.name == 'nt', "Runs forever on Windows") +@unittest.skipIf(os.name == "nt", "Runs forever on Windows") class TestExecutorHelper(unittest.TestCase): def setUp(self): - self.test_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)), "cache") + self.test_dir = os.path.join( + os.path.abspath(os.path.dirname(__file__)), "cache" + ) os.makedirs(self.test_dir, exist_ok=True) def tearDown(self): @@ -37,14 +39,10 @@ def tearDown(self): def test_cache(self): funct_dict = serialize_funct(fn=funct_add, a=1, b=2) file_name_in = write_to_file( - funct_dict=funct_dict, - state="in", - cache_directory=self.test_dir + funct_dict=funct_dict, state="in", cache_directory=self.test_dir )[0] self.assertEqual(len(os.listdir(self.test_dir)), 1) - funct_dict = read_from_file( - file_name=os.path.join(self.test_dir, file_name_in) - ) + funct_dict = read_from_file(file_name=os.path.join(self.test_dir, file_name_in)) apply_dict = deserialize(funct_dict=funct_dict) key = list(apply_dict.keys())[0] v = apply_dict[key] @@ -56,48 +54,41 @@ def test_cache(self): )[0] self.assertEqual(len(os.listdir(self.test_dir)), 2) f = Future() - _set_future( - file_name=os.path.join(self.test_dir, file_name_out), - future=f - ) + _set_future(file_name=os.path.join(self.test_dir, file_name_out), future=f) self.assertEqual(f.result(), 3) task_dict = {key: Future()} _update_task_dict( - task_dict=task_dict, - task_memory_dict={}, - cache_directory=self.test_dir + task_dict=task_dict, task_memory_dict={}, cache_directory=self.test_dir ) self.assertEqual(task_dict[key].result(), 3) def test_reload_previous_future(self): funct_dict = serialize_funct(fn=funct_add, a=1, b=2) file_name_in = write_to_file( - funct_dict=funct_dict, - state="in", - cache_directory=self.test_dir + funct_dict=funct_dict, 
state="in", cache_directory=self.test_dir )[0] queue = Queue() future_dict_one = {} reload_previous_futures( future_queue=queue, future_dict=future_dict_one, - cache_directory=self.test_dir + cache_directory=self.test_dir, ) self.assertEqual(len(future_dict_one), 1) - self.assertEqual(list(future_dict_one.keys())[0], file_name_in.split(".in.pl")[0]) + self.assertEqual( + list(future_dict_one.keys())[0], file_name_in.split(".in.pl")[0] + ) self.assertEqual(len(os.listdir(self.test_dir)), 1) with ThreadPoolExecutor() as exe: execute_files_from_list( - tasks_in_progress_dict={}, - cache_directory=self.test_dir, - executor=exe + tasks_in_progress_dict={}, cache_directory=self.test_dir, executor=exe ) self.assertEqual(len(os.listdir(self.test_dir)), 2) future_dict_two = {} reload_previous_futures( future_queue=queue, future_dict=future_dict_two, - cache_directory=self.test_dir + cache_directory=self.test_dir, ) key = list(future_dict_two.keys())[0] self.assertEqual(len(future_dict_two), 1) @@ -107,7 +98,9 @@ def test_reload_previous_future(self): class TestExecutor(unittest.TestCase): def setUp(self): - self.test_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)), "executor_cache") + self.test_dir = os.path.join( + os.path.abspath(os.path.dirname(__file__)), "executor_cache" + ) os.makedirs(self.test_dir, exist_ok=True) def tearDown(self): @@ -117,26 +110,22 @@ def tearDown(self): def test_executor(self): def execute_command( - commands, - working_directory=None, - split_output=True, - shell=False, - error_filename="pysqa.err", + commands, + working_directory=None, + split_output=True, + shell=False, + error_filename="pysqa.err", ): return str(1) queue_adapter = QueueAdapter( directory=os.path.join(self.test_dir, "../config/slurm"), - execute_command=execute_command + execute_command=execute_command, ) with Executor( cwd=self.test_dir, queue_adapter=queue_adapter, - queue_adapter_kwargs={ - "queue": "slurm", - "job_name": "test", - "cores": 1 - } + 
queue_adapter_kwargs={"queue": "slurm", "job_name": "test", "cores": 1}, ) as exe: fs = exe.submit(fn=funct_add, a=1, b=2) funct_dict = serialize_funct(fn=funct_add, a=1, b=2) @@ -146,7 +135,8 @@ def execute_command( ) apply_dict = deserialize(funct_dict=funct_dict) result_dict = { - k: v["fn"].__call__(*v["args"], **v["kwargs"]) for k, v in apply_dict.items() + k: v["fn"].__call__(*v["args"], **v["kwargs"]) + for k, v in apply_dict.items() } _ = write_to_file( funct_dict=serialize_result(result_dict=result_dict), diff --git a/tests/test_flux.py b/tests/test_flux.py index 651a99e..7b9ec5d 100644 --- a/tests/test_flux.py +++ b/tests/test_flux.py @@ -11,12 +11,15 @@ try: import flux + skip_flux_test = False except ImportError: skip_flux_test = True -@unittest.skipIf(skip_flux_test, "Flux is not installed, so the flux tests are skipped.") +@unittest.skipIf( + skip_flux_test, "Flux is not installed, so the flux tests are skipped." +) class TestFluxQueueAdapter(unittest.TestCase): @classmethod def setUpClass(cls): @@ -28,7 +31,7 @@ def test_config(self): self.assertEqual(self.flux.config["queue_primary"], "flux") def test_list_clusters(self): - self.assertEqual(self.flux.list_clusters(), ['default']) + self.assertEqual(self.flux.list_clusters(), ["default"]) def test_remote_flag(self): self.assertFalse(self.flux._adapter.remote_flag) @@ -40,7 +43,9 @@ def test_interfaces(self): self.assertEqual( self.flux._adapter._commands.submit_job_command, ["flux", "batch"] ) - self.assertEqual(self.flux._adapter._commands.delete_job_command, ["flux", "cancel"]) + self.assertEqual( + self.flux._adapter._commands.delete_job_command, ["flux", "cancel"] + ) self.assertEqual( self.flux._adapter._commands.get_queue_status_command, ["flux", "jobs", "-a", "--no-header"], @@ -49,37 +54,46 @@ def test_interfaces(self): def test_convert_queue_status_slurm(self): with open(os.path.join(self.path, "config/flux", "flux_jobs"), "r") as f: content = f.read() - df = pandas.DataFrame({ - "jobid": 
[1125147213824, 1109007532032, 1092532305920], - "user": ["dahn", "dahn", "dahn"], - "jobname": ["sleep_batc", "sleep_batc", "sleep_batc"], - "status": ["running", "running", "running"] - }) - self.assertTrue(df.equals(self.flux._adapter._commands.convert_queue_status( - queue_status_output=content - ))) + df = pandas.DataFrame( + { + "jobid": [1125147213824, 1109007532032, 1092532305920], + "user": ["dahn", "dahn", "dahn"], + "jobname": ["sleep_batc", "sleep_batc", "sleep_batc"], + "status": ["running", "running", "running"], + } + ) + self.assertTrue( + df.equals( + self.flux._adapter._commands.convert_queue_status( + queue_status_output=content + ) + ) + ) def test_submit_job(self): def execute_command( - commands, - working_directory=None, - split_output=True, - shell=False, - error_filename="pysqa.err", + commands, + working_directory=None, + split_output=True, + shell=False, + error_filename="pysqa.err", ): return "ƒWZEQa8X\n" flux_tmp = QueueAdapter( directory=os.path.join(self.path, "config/flux"), - execute_command=execute_command + execute_command=execute_command, + ) + self.assertEqual( + flux_tmp.submit_job( + queue="flux", + job_name="test", + working_directory=".", + cores=4, + command="echo hello", + ), + 1125147213824, ) - self.assertEqual(flux_tmp.submit_job( - queue="flux", - job_name="test", - working_directory=".", - cores=4, - command="echo hello" - ), 1125147213824) with open("run_queue.sh") as f: output = f.read() content = """\ @@ -101,8 +115,8 @@ def test_flux_integration(self): job_name="test", working_directory=".", cores=1, - command="sleep 1" + command="sleep 1", ) - self.assertEqual(self.flux.get_status_of_job(process_id=job_id), 'running') + self.assertEqual(self.flux.get_status_of_job(process_id=job_id), "running") self.flux.delete_job(process_id=job_id) - self.assertEqual(self.flux.get_status_of_job(process_id=job_id), 'error') + self.assertEqual(self.flux.get_status_of_job(process_id=job_id), "error") diff --git 
a/tests/test_gent.py b/tests/test_gent.py index 9105e7a..b4d5a5e 100644 --- a/tests/test_gent.py +++ b/tests/test_gent.py @@ -19,11 +19,18 @@ "cluster": ["Mycluster", "Mycluster", "Mycluster", "Mycluster", "Mycluster"], "jobid": [5322019, 5322016, 5322017, 5322018, 5322013], "user": ["janj", "janj", "janj", "janj", "janj"], - "jobname": ["pi_19576488", "pi_19576485", "pi_19576486", "pi_19576487", "pi_19576482"], + "jobname": [ + "pi_19576488", + "pi_19576485", + "pi_19576486", + "pi_19576487", + "pi_19576482", + ], "status": ["r", "r", "r", "r", "r"], } ) + class TestGentQueueAdapter(unittest.TestCase): @classmethod def setUpClass(cls): @@ -35,7 +42,7 @@ def test_config(self): self.assertEqual(self.gent.config["queue_primary"], "slurm") def test_list_clusters(self): - self.assertEqual(self.gent.list_clusters(), ['default']) + self.assertEqual(self.gent.list_clusters(), ["default"]) def test_ssh_delete_file_on_remote(self): self.assertEqual(self.gent.ssh_delete_file_on_remote, True) @@ -61,10 +68,14 @@ def test__list_command_to_be_executed(self): ) def test_get_job_id_from_output(self): - self.assertEqual(self.gent._adapter._commands.get_job_id_from_output("123;MyQueue"), 123) + self.assertEqual( + self.gent._adapter._commands.get_job_id_from_output("123;MyQueue"), 123 + ) def test_get_queue_from_output(self): - self.assertEqual(self.gent._adapter._commands.get_queue_from_output("123;MyQueue"), "MyQueue") + self.assertEqual( + self.gent._adapter._commands.get_queue_from_output("123;MyQueue"), "MyQueue" + ) def test_convert_queue_status_slurm(self): with open(os.path.join(self.path, "config/gent", "gent_output"), "r") as f: @@ -80,125 +91,136 @@ def test_convert_queue_status_slurm(self): def test_switch_cluster_command(self): self.assertEqual( self.gent._adapter._switch_cluster_command(cluster_module="module1"), - ['module', '--quiet', 'swap', 'cluster/module1;'] + ["module", "--quiet", "swap", "cluster/module1;"], ) def test_resolve_queue_id(self): self.assertEqual( 
- self.gent._adapter._resolve_queue_id(process_id=20120, cluster_dict={0: "cluster1"}), - ("cluster1", 2012) + self.gent._adapter._resolve_queue_id( + process_id=20120, cluster_dict={0: "cluster1"} + ), + ("cluster1", 2012), ) def test_submit_job_no_output(self): def execute_command( - commands, - working_directory=None, - split_output=True, - shell=False, - error_filename="pysqa.err", + commands, + working_directory=None, + split_output=True, + shell=False, + error_filename="pysqa.err", ): pass gent_tmp = QueueAdapter( directory=os.path.join(self.path, "config/gent"), - execute_command=execute_command - ) - self.assertIsNone(gent_tmp.submit_job( - queue="slurm", - job_name="test", - working_directory=".", - command="echo hello" - )) + execute_command=execute_command, + ) + self.assertIsNone( + gent_tmp.submit_job( + queue="slurm", + job_name="test", + working_directory=".", + command="echo hello", + ) + ) os.remove("run_queue.sh") def test_submit_job_with_output(self): def execute_command( - commands, - working_directory=None, - split_output=True, - shell=False, - error_filename="pysqa.err", + commands, + working_directory=None, + split_output=True, + shell=False, + error_filename="pysqa.err", ): return "123;cluster0" gent_tmp = QueueAdapter( directory=os.path.join(self.path, "config/gent"), - execute_command=execute_command - ) - self.assertEqual(gent_tmp.submit_job( - queue="slurm", - job_name="test", - working_directory=".", - command="echo hello" - ), 1230) + execute_command=execute_command, + ) + self.assertEqual( + gent_tmp.submit_job( + queue="slurm", + job_name="test", + working_directory=".", + command="echo hello", + ), + 1230, + ) os.remove("run_queue.sh") def test_delete_job_no_output(self): def execute_command( - commands, - working_directory=None, - split_output=True, - shell=False, - error_filename="pysqa.err", + commands, + working_directory=None, + split_output=True, + shell=False, + error_filename="pysqa.err", ): pass gent_tmp = QueueAdapter( 
directory=os.path.join(self.path, "config/gent"), - execute_command=execute_command + execute_command=execute_command, ) self.assertIsNone(gent_tmp.delete_job(process_id=1)) def test_delete_job_with_output(self): def execute_command( - commands, - working_directory=None, - split_output=True, - shell=False, - error_filename="pysqa.err", + commands, + working_directory=None, + split_output=True, + shell=False, + error_filename="pysqa.err", ): return 0, 1 gent_tmp = QueueAdapter( directory=os.path.join(self.path, "config/gent"), - execute_command=execute_command + execute_command=execute_command, ) self.assertEqual(gent_tmp.delete_job(process_id=1), 0) def test_get_queue_status(self): def execute_command( - commands, - working_directory=None, - split_output=True, - shell=False, - error_filename="pysqa.err", + commands, + working_directory=None, + split_output=True, + shell=False, + error_filename="pysqa.err", ): with open(os.path.join(self.path, "config", "gent", "gent_output")) as f: return f.read() gent_tmp = QueueAdapter( directory=os.path.join(self.path, "config/gent"), - execute_command=execute_command + execute_command=execute_command, ) - self.assertTrue(pandas.concat([df_queue_status] * 3).reset_index(drop=True).equals( - gent_tmp.get_queue_status()) + self.assertTrue( + pandas.concat([df_queue_status] * 3) + .reset_index(drop=True) + .equals(gent_tmp.get_queue_status()) ) def test_get_queue_status_user(self): def execute_command( - commands, - working_directory=None, - split_output=True, - shell=False, - error_filename="pysqa.err", + commands, + working_directory=None, + split_output=True, + shell=False, + error_filename="pysqa.err", ): with open(os.path.join(self.path, "config", "gent", "gent_output")) as f: return f.read() gent_tmp = QueueAdapter( directory=os.path.join(self.path, "config/gent"), - execute_command=execute_command + execute_command=execute_command, ) - self.assertTrue(pandas.concat([df_queue_status] * 3).reset_index(drop=True).equals( - 
gent_tmp.get_queue_status(user="janj")) + self.assertTrue( + pandas.concat([df_queue_status] * 3) + .reset_index(drop=True) + .equals(gent_tmp.get_queue_status(user="janj")) ) diff --git a/tests/test_lsf.py b/tests/test_lsf.py index 5643e95..da1ca61 100644 --- a/tests/test_lsf.py +++ b/tests/test_lsf.py @@ -28,7 +28,7 @@ def test_config(self): self.assertEqual(self.lsf.config["queue_primary"], "lsf") def test_list_clusters(self): - self.assertEqual(self.lsf.list_clusters(), ['default']) + self.assertEqual(self.lsf.list_clusters(), ["default"]) def test_ssh_delete_file_on_remote(self): self.assertEqual(self.lsf.ssh_delete_file_on_remote, True) @@ -44,9 +44,7 @@ def test_job_submission_template(self): ) def test_interfaces(self): - self.assertEqual( - self.lsf._adapter._commands.submit_job_command, ["bsub"] - ) + self.assertEqual(self.lsf._adapter._commands.submit_job_command, ["bsub"]) self.assertEqual(self.lsf._adapter._commands.delete_job_command, ["bkill"]) self.assertEqual( self.lsf._adapter._commands.get_queue_status_command, ["bjobs"] @@ -69,12 +67,14 @@ def test__list_command_to_be_executed(self): def test_convert_queue_status_sge(self): with open(os.path.join(self.path, "config/lsf", "bjobs_output"), "r") as f: content = f.read() - df = pandas.DataFrame({ - "jobid": [5136563, 5136570, 5136571], - "user": ["testuse"] * 3, - "jobname": ["pi_None"] * 3, - "status": ["running"] * 3 - }) + df = pandas.DataFrame( + { + "jobid": [5136563, 5136570, 5136571], + "user": ["testuse"] * 3, + "jobname": ["pi_None"] * 3, + "status": ["running"] * 3, + } + ) self.assertTrue( df.equals( self.lsf._adapter._commands.convert_queue_status( diff --git a/tests/test_moab.py b/tests/test_moab.py index eb4acb8..303e9d9 100644 --- a/tests/test_moab.py +++ b/tests/test_moab.py @@ -25,7 +25,7 @@ def test_config(self): self.assertEqual(self.moab.config["queue_primary"], "moab") def test_list_clusters(self): - self.assertEqual(self.moab.list_clusters(), ['default']) + 
self.assertEqual(self.moab.list_clusters(), ["default"]) def test_ssh_delete_file_on_remote(self): self.assertEqual(self.moab.ssh_delete_file_on_remote, True) diff --git a/tests/test_multi.py b/tests/test_multi.py index b892519..966dad0 100644 --- a/tests/test_multi.py +++ b/tests/test_multi.py @@ -8,6 +8,7 @@ try: import paramiko from tqdm import tqdm + skip_multi_test = False except ImportError: skip_multi_test = True @@ -22,16 +23,21 @@ __date__ = "Feb 9, 2019" -@unittest.skipIf(skip_multi_test, "Either paramiko or tqdm are not installed, so the multi queue adapter tests are skipped.") +@unittest.skipIf( + skip_multi_test, + "Either paramiko or tqdm are not installed, so the multi queue adapter tests are skipped.", +) class TestMultiQueueAdapter(unittest.TestCase): @classmethod def setUpClass(cls): cls.path = os.path.dirname(os.path.abspath(__file__)) - cls.multi = QueueAdapter(directory=os.path.join(cls.path, "config/multicluster")) + cls.multi = QueueAdapter( + directory=os.path.join(cls.path, "config/multicluster") + ) def test_config(self): self.assertEqual(self.multi.config["queue_type"], "SLURM") self.assertEqual(self.multi.config["queue_primary"], "slurm") def test_list_clusters(self): - self.assertEqual(self.multi.list_clusters(), ['local_slurm', 'remote_slurm']) + self.assertEqual(self.multi.list_clusters(), ["local_slurm", "remote_slurm"]) diff --git a/tests/test_remote.py b/tests/test_remote.py index 54d30c6..65d2fdf 100644 --- a/tests/test_remote.py +++ b/tests/test_remote.py @@ -8,6 +8,7 @@ try: import paramiko from tqdm import tqdm + skip_remote_test = False except ImportError: skip_remote_test = True @@ -22,20 +23,25 @@ __date__ = "Feb 9, 2019" -@unittest.skipIf(skip_remote_test, "Either paramiko or tqdm are not installed, so the remote queue adapter tests are skipped.") +@unittest.skipIf( + skip_remote_test, + "Either paramiko or tqdm are not installed, so the remote queue adapter tests are skipped.", +) class 
TestRemoteQueueAdapter(unittest.TestCase): @classmethod def setUpClass(cls): cls.path = os.path.dirname(os.path.abspath(__file__)) cls.remote = QueueAdapter(directory=os.path.join(cls.path, "config/remote")) - cls.remote_alternative = QueueAdapter(directory=os.path.join(cls.path, "config/remote_alternative")) + cls.remote_alternative = QueueAdapter( + directory=os.path.join(cls.path, "config/remote_alternative") + ) def test_config(self): self.assertEqual(self.remote.config["queue_type"], "REMOTE") self.assertEqual(self.remote.config["queue_primary"], "remote") def test_list_clusters(self): - self.assertEqual(self.remote.list_clusters(), ['default']) + self.assertEqual(self.remote.list_clusters(), ["default"]) def test_remote_flag(self): self.assertTrue(self.remote._adapter.remote_flag) @@ -45,7 +51,9 @@ def test_ssh_delete_file_on_remote(self): def test_ssh_continous_connection(self): self.assertEqual(self.remote._adapter._ssh_continous_connection, True) - self.assertEqual(self.remote_alternative._adapter._ssh_continous_connection, False) + self.assertEqual( + self.remote_alternative._adapter._ssh_continous_connection, False + ) def test_submit_job_remote(self): with self.assertRaises(NotImplementedError): @@ -63,26 +71,32 @@ def test_submit_command(self): ) self.assertEqual( command, - 'python -m pysqa --config_directory /u/share/pysqa/resources/queues/ --submit --queue remote --job_name test --working_directory /home/localuser/projects/test --cores 1 --memory 1 --run_time 1 --command "/bin/true" ' + 'python -m pysqa --config_directory /u/share/pysqa/resources/queues/ --submit --queue remote --job_name test --working_directory /home/localuser/projects/test --cores 1 --memory 1 --run_time 1 --command "/bin/true" ', ) def test_get_queue_status_command(self): command = self.remote._adapter._get_queue_status_command() - self.assertEqual(command, "python -m pysqa --config_directory /u/share/pysqa/resources/queues/ --status") + self.assertEqual( + command, + "python -m 
pysqa --config_directory /u/share/pysqa/resources/queues/ --status", + ) def test_convert_path_to_remote(self): - self.assertEqual(self.remote.convert_path_to_remote("/home/localuser/projects/test"), "/u/hpcuser/remote/test") + self.assertEqual( + self.remote.convert_path_to_remote("/home/localuser/projects/test"), + "/u/hpcuser/remote/test", + ) def test_delete_command(self): self.assertEqual( "python -m pysqa --config_directory /u/share/pysqa/resources/queues/ --delete --id 123", - self.remote._adapter._delete_command(job_id=123) + self.remote._adapter._delete_command(job_id=123), ) def test_reservation_command(self): self.assertEqual( "python -m pysqa --config_directory /u/share/pysqa/resources/queues/ --reservation --id 123", - self.remote._adapter._reservation_command(job_id=123) + self.remote._adapter._reservation_command(job_id=123), ) def test_get_ssh_user(self): @@ -90,6 +104,8 @@ def test_get_ssh_user(self): def test_get_file_transfer(self): self.assertEqual( - self.remote._adapter._get_file_transfer(file="abc.txt", local_dir="local", remote_dir="test"), - os.path.abspath("abc.txt") - ) \ No newline at end of file + self.remote._adapter._get_file_transfer( + file="abc.txt", local_dir="local", remote_dir="test" + ), + os.path.abspath("abc.txt"), + ) diff --git a/tests/test_scheduler_commands.py b/tests/test_scheduler_commands.py index 4b06d4a..3326fce 100644 --- a/tests/test_scheduler_commands.py +++ b/tests/test_scheduler_commands.py @@ -32,7 +32,6 @@ def test_convert_queue_status(self): def test_no_delete_job_command(self): class NoDelteteSchedularCommands(SchedulerCommands): - def get_queue_status_command(self): pass diff --git a/tests/test_sge.py b/tests/test_sge.py index 5def1b4..9b1b53f 100644 --- a/tests/test_sge.py +++ b/tests/test_sge.py @@ -9,6 +9,7 @@ try: import defusedxml.ElementTree as ETree + skip_sge_test = False except ImportError: skip_sge_test = True @@ -22,7 +23,10 @@ __date__ = "Feb 9, 2019" -@unittest.skipIf(skip_sge_test, "defusedxml 
is not installed, so the sun grid engine (SGE) tests are skipped.") +@unittest.skipIf( + skip_sge_test, + "defusedxml is not installed, so the sun grid engine (SGE) tests are skipped.", +) class TestSGEQueueAdapter(unittest.TestCase): @classmethod def setUpClass(cls): @@ -34,7 +38,7 @@ def test_config(self): self.assertEqual(self.sge.config["queue_primary"], "impi_hydra_small") def test_list_clusters(self): - self.assertEqual(self.sge.list_clusters(), ['default']) + self.assertEqual(self.sge.list_clusters(), ["default"]) def test_ssh_delete_file_on_remote(self): self.assertEqual(self.sge.ssh_delete_file_on_remote, True) diff --git a/tests/test_slurm.py b/tests/test_slurm.py index db71781..7ae67f7 100644 --- a/tests/test_slurm.py +++ b/tests/test_slurm.py @@ -20,7 +20,13 @@ { "jobid": [5322019, 5322016, 5322017, 5322018, 5322013], "user": ["janj", "janj", "janj", "janj", "maxi"], - "jobname": ["pi_19576488", "pi_19576485", "pi_19576486", "pi_19576487", "pi_19576482"], + "jobname": [ + "pi_19576488", + "pi_19576485", + "pi_19576486", + "pi_19576487", + "pi_19576482", + ], "status": ["running", "running", "running", "running", "running"], "working_directory": [ "/cmmc/u/janj/pyiron/projects/2023/2023-04-19-dft-test/job_1", @@ -28,7 +34,7 @@ "/cmmc/u/janj/pyiron/projects/2023/2023-04-19-dft-test/job_3", "/cmmc/u/janj/pyiron/projects/2023/2023-04-19-dft-test/job_4", "/cmmc/u/janj/pyiron/projects/2023/2023-04-19-dft-test/job_5", - ] + ], } ) @@ -44,7 +50,7 @@ def test_config(self): self.assertEqual(self.slurm.config["queue_primary"], "slurm") def test_list_clusters(self): - self.assertEqual(self.slurm.list_clusters(), ['default']) + self.assertEqual(self.slurm.list_clusters(), ["default"]) def test_remote_flag(self): self.assertFalse(self.slurm._adapter.remote_flag) @@ -109,7 +115,7 @@ def test_write_queue(self): cores=None, memory_max=None, run_time_max=None, - command=None + command=None, ) self.slurm._adapter._write_queue_script( queue="slurm", @@ -118,7 +124,7 @@ def 
test_write_queue(self): cores=None, memory_max=None, run_time_max=None, - command="echo \"hello\"" + command='echo "hello"', ) with open("run_queue.sh", "r") as f: content = f.read() @@ -144,8 +150,8 @@ def test_write_queue_extra_keywords(self): cores=None, memory_max=None, run_time_max=None, - command="echo \"hello\"", - account="123456" + command='echo "hello"', + account="123456", ) with open("run_queue.sh", "r") as f: content = f.read() @@ -166,24 +172,26 @@ def test_write_queue_extra_keywords(self): def test_no_queue_id_returned(self): def execute_command( - commands, - working_directory=None, - split_output=True, - shell=False, - error_filename="pysqa.err", + commands, + working_directory=None, + split_output=True, + shell=False, + error_filename="pysqa.err", ): pass slurm_tmp = QueueAdapter( directory=os.path.join(self.path, "config/slurm"), - execute_command=execute_command + execute_command=execute_command, + ) + self.assertIsNone( + slurm_tmp.submit_job( + queue="slurm", + job_name="test", + working_directory=".", + command="echo hello", + ) ) - self.assertIsNone(slurm_tmp.submit_job( - queue="slurm", - job_name="test", - working_directory=".", - command="echo hello" - )) self.assertIsNone(slurm_tmp.delete_job(process_id=123)) def test_queue_status(self): @@ -199,38 +207,43 @@ def execute_command( slurm_tmp = QueueAdapter( directory=os.path.join(self.path, "config/slurm"), - execute_command=execute_command - ) - self.assertTrue( - df_queue_status.equals(slurm_tmp.get_queue_status()) + execute_command=execute_command, ) + self.assertTrue(df_queue_status.equals(slurm_tmp.get_queue_status())) self.assertTrue( - df_queue_status[df_queue_status.user=="janj"].equals(slurm_tmp.get_queue_status(user="janj")) + df_queue_status[df_queue_status.user == "janj"].equals( + slurm_tmp.get_queue_status(user="janj") + ) ) self.assertEqual(slurm_tmp.get_status_of_job(process_id=5322019), "running") self.assertIsNone(slurm_tmp.get_status_of_job(process_id=0)) - 
self.assertEqual(slurm_tmp.get_status_of_jobs(process_id_lst=[5322019, 0]), ["running", "finished"]) + self.assertEqual( + slurm_tmp.get_status_of_jobs(process_id_lst=[5322019, 0]), + ["running", "finished"], + ) def test_not_implemented_functions(self): def execute_command( - commands, - working_directory=None, - split_output=True, - shell=False, - error_filename="pysqa.err", + commands, + working_directory=None, + split_output=True, + shell=False, + error_filename="pysqa.err", ): pass slurm_tmp = QueueAdapter( directory=os.path.join(self.path, "config/slurm"), - execute_command=execute_command + execute_command=execute_command, ) with self.assertRaises(NotImplementedError): slurm_tmp._adapter.convert_path_to_remote(path="test") with self.assertRaises(NotImplementedError): - slurm_tmp._adapter.transfer_file(file="test", transfer_back=False, delete_file_on_remote=False) + slurm_tmp._adapter.transfer_file( + file="test", transfer_back=False, delete_file_on_remote=False + ) with self.assertRaises(NotImplementedError): slurm_tmp._adapter.get_job_from_remote(working_directory=".") diff --git a/tests/test_torque.py b/tests/test_torque.py index 7d549ba..cdbe1a5 100644 --- a/tests/test_torque.py +++ b/tests/test_torque.py @@ -26,15 +26,13 @@ def test_config(self): self.assertEqual(self.torque.config["queue_primary"], "torque") def test_list_clusters(self): - self.assertEqual(self.torque.list_clusters(), ['default']) + self.assertEqual(self.torque.list_clusters(), ["default"]) def test_ssh_delete_file_on_remote(self): self.assertEqual(self.torque.ssh_delete_file_on_remote, True) def test_interfaces(self): - self.assertEqual( - self.torque._adapter._commands.submit_job_command, ["qsub"] - ) + self.assertEqual(self.torque._adapter._commands.submit_job_command, ["qsub"]) self.assertEqual(self.torque._adapter._commands.delete_job_command, ["qdel"]) self.assertEqual( self.torque._adapter._commands.get_queue_status_command, ["qstat", "-f"] @@ -55,17 +53,25 @@ def 
test__list_command_to_be_executed(self): ) def test_convert_queue_status_torque(self): - with open(os.path.join(self.path, "config/torque", "PBSPro_qsub_output"), "r") as f: + with open( + os.path.join(self.path, "config/torque", "PBSPro_qsub_output"), "r" + ) as f: content = f.read() df_verify = pandas.DataFrame( { "jobid": [80005196, 80005197, 80005198], "user": ["asd562", "asd562", "fgh562"], - "jobname": ["test1", "test2", "test_asdfasdfasdfasdfasdfasdfasdfasdfasdfasdf"], + "jobname": [ + "test1", + "test2", + "test_asdfasdfasdfasdfasdfasdfasdfasdfasdfasdf", + ], "status": ["running", "pending", "pending"], - "working_directory": ["/scratch/a01/asd562/VASP/test/test1", \ - "/scratch/a01/asd562/VASP/test/test2", \ - "/scratch/a01/fgh562/VASP/test/test_asdfasdfasdfasdfasdfasdfasdfasdfasdfasdf"] + "working_directory": [ + "/scratch/a01/asd562/VASP/test/test1", + "/scratch/a01/asd562/VASP/test/test2", + "/scratch/a01/fgh562/VASP/test/test_asdfasdfasdfasdfasdfasdfasdfasdfasdfasdf", + ], } ) self.assertTrue(