From f4c46d3e39c7734c57d0c17d2f7c89e2e5559e44 Mon Sep 17 00:00:00 2001 From: Matthew Evans <git@ml-evs.science> Date: Tue, 20 Feb 2024 14:41:37 +0000 Subject: [PATCH] Dial back the size of test data in the GridFS test --- src/jobflow_remote/testing/__init__.py | 12 ++++-------- tests/integration/conftest.py | 2 +- tests/integration/test_slurm.py | 2 +- 3 files changed, 6 insertions(+), 10 deletions(-) diff --git a/src/jobflow_remote/testing/__init__.py b/src/jobflow_remote/testing/__init__.py index 048e5005..9981ec95 100644 --- a/src/jobflow_remote/testing/__init__.py +++ b/src/jobflow_remote/testing/__init__.py @@ -43,19 +43,15 @@ def check_env_var() -> str: return os.environ.get("TESTING_ENV_VAR", "unset") -@job +@job(big_data="data") def add_big(a: float, b: float): """Adds two numbers together and inflates the answer - to an array too large to store in MongoDB, then tries - to store that within the defined store. + to a large list and tries to store that within + the defined store. 
""" - import array - result = a + b - # create a 1.6 MB array that will be too large to store in MongoDB - # the array type "d" is a double-precision float, which is 8 bytes - big_array = array.array("d", [result] * 200_000) + big_array = [result] * 5_000 return Response({"data": big_array, "result": a + b}) diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index c38caf8a..112d7589 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -198,7 +198,7 @@ def write_tmp_settings( "collection_name": "docs", }, "additional_stores": { - "big_files": { + "big_data": { "type": "GridFSStore", "database": store_database_name, "host": "localhost", diff --git a/tests/integration/test_slurm.py b/tests/integration/test_slurm.py index 4d373ba2..ed9adbe0 100644 --- a/tests/integration/test_slurm.py +++ b/tests/integration/test_slurm.py @@ -259,7 +259,7 @@ def test_additional_stores(worker, job_controller): runner.run(ticks=10) doc = job_controller.get_jobs({})[0] - fs = job_controller.jobstore.additional_stores["big_files"] + fs = job_controller.jobstore.additional_stores["big_data"] assert fs.count({"job_uuid": doc["job"]["uuid"]}) == 1 assert job_controller.count_jobs(state=JobState.COMPLETED) == 1 assert job_controller.count_flows(state=FlowState.COMPLETED) == 1