diff --git a/.github/workflows/create-release-n-publish.yml b/.github/workflows/create-release-n-publish.yml
index bf674c6b..7b7c3629 100644
--- a/.github/workflows/create-release-n-publish.yml
+++ b/.github/workflows/create-release-n-publish.yml
@@ -102,7 +102,9 @@ jobs:
       - name: Test pip install multiple adapters
         run: pip install flowcept[mlflow,tensorboard]
       - name: Install our dependencies
-        run: pip install flowcept[all]  # This will install all dependencies, for all adapters and dev deps.
+        run: pip install flowcept[all]
+      - name: Install ml_dev dependencies
+        run: pip install flowcept[ml_dev]
       - name: Pip list
         run: pip list
       - name: Run Docker Compose
diff --git a/.github/workflows/run-tests-in-container.yml b/.github/workflows/run-tests-in-container.yml
new file mode 100644
index 00000000..e6218d84
--- /dev/null
+++ b/.github/workflows/run-tests-in-container.yml
@@ -0,0 +1,24 @@
+name: Tests inside a Container
+on: [pull_request]
+
+jobs:
+
+  build:
+    runs-on: ubuntu-latest
+    timeout-minutes: 40
+    if: "!contains(github.event.head_commit.message, 'CI Bot')"
+
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: Show OS Info
+        run: '[[ "$OSTYPE" == "linux-gnu"* ]] && { echo "OS Type: Linux"; (command -v lsb_release &> /dev/null && lsb_release -a) || cat /etc/os-release; uname -r; } || [[ "$OSTYPE" == "darwin"* ]] && { echo "OS Type: macOS"; sw_vers; uname -r; } || echo "Unsupported OS type: $OSTYPE"'
+
+      - name: Build Flowcept's image
+        run: make build
+
+      - name: Start dependent services (Mongo and Redis)
+        run: make services
+
+      - name: Run tests in container
+        run: make tests-in-container
diff --git a/.github/workflows/run-tests-kafka.yml b/.github/workflows/run-tests-kafka.yml
index 98773500..a7aedc49 100644
--- a/.github/workflows/run-tests-kafka.yml
+++ b/.github/workflows/run-tests-kafka.yml
@@ -24,6 +24,7 @@ jobs:
         run: |
           python -m pip install --upgrade pip
           python -m pip install .[all]
+          python -m pip install .[ml_dev]

       - name: Run docker compose
         run: docker compose -f deployment/compose-kafka.yml up -d
diff --git a/.github/workflows/run-tests-py11.yml b/.github/workflows/run-tests-py11.yml
index d1321b30..29bbefc5 100644
--- a/.github/workflows/run-tests-py11.yml
+++ b/.github/workflows/run-tests-py11.yml
@@ -56,6 +56,7 @@ jobs:
         run: |
           python -m pip install --upgrade pip
           python -m pip install .[all]
+          python -m pip install .[ml_dev]

       - name: List installed packages
         run: pip list
diff --git a/.github/workflows/run-tests.yml b/.github/workflows/run-tests.yml
index 3a0faeff..3f0e7423 100644
--- a/.github/workflows/run-tests.yml
+++ b/.github/workflows/run-tests.yml
@@ -32,30 +32,43 @@ jobs:
       - name: Install default dependencies and run simple test
        run: |
          pip install .
-          python examples/simple_instrumented_script.py
+          python examples/simple_instrumented_script.py | tee output.log
+          cat output.log
+          grep -q "ERROR" output.log && exit 1
+          rm output.log

       - name: Install Dask dependencies alone and run a simple Dask test
         run: |
           pip uninstall flowcept -y
           pip install .[dask]
-          python examples/dask_example.py
+          python examples/dask_example.py | tee output.log
+          cat output.log
+          grep -q "ERROR" output.log && exit 1
+          rm output.log

       - name: Install MLFlow dependencies alone and run a simple MLFlow test
         run: |
           pip uninstall flowcept -y
           pip install .[mlflow]
-          python examples/mlflow_example.py
+          python examples/mlflow_example.py | tee output.log
+          cat output.log
+          grep -q "ERROR" output.log && exit 1
+          rm output.log

       - name: Install Tensorboard dependencies alone and run a simple Tensorboard test
         run: |
           pip uninstall flowcept -y
           pip install .[tensorboard]
-          python examples/tensorboard_example.py
+          python examples/tensorboard_example.py | tee output.log
+          cat output.log
+          grep -q "ERROR" output.log && exit 1
+          rm output.log

       - name: Install all dependencies
         run: |
           python -m pip install --upgrade pip
           python -m pip install .[all]
+          python -m pip install .[ml_dev]

       - name: List installed packages
         run: pip list
diff --git a/Makefile b/Makefile
index 20f86ed4..e4671cc9 100644
--- a/Makefile
+++ b/Makefile
@@ -1,16 +1,19 @@
 # Show help, place this first so it runs with just `make`
 help:
	@printf "\nCommands:\n"
-	@printf "\033[32mchecks\033[0m              run ruff linter and formatter checks\n"
-	@printf "\033[32mreformat\033[0m            run ruff linter and formatter\n"
-	@printf "\033[32mclean\033[0m               remove cache directories and Sphinx build output\n"
-	@printf "\033[32mdocs\033[0m                build HTML documentation using Sphinx\n"
-	@printf "\033[32mservices\033[0m            run services using Docker\n"
-	@printf "\033[32mservices-stop\033[0m       stop the running Docker services\n"
-	@printf "\033[32mtests\033[0m               run unit tests with pytest\n"
-	@printf "\033[32mtests-all\033[0m           run all unit tests with pytest, including very long-running ones\n"
-	@printf "\033[32mtests-notebooks\033[0m     tests the notebooks, using pytest\n"
-
+	@printf "\033[32mbuild\033[0m               build the Docker image\n"
+	@printf "\033[32mrun\033[0m                 run the Docker container\n"
+	@printf "\033[32mliveness\033[0m            check if the services are alive\n"
+	@printf "\033[32mservices\033[0m            run services using Docker\n"
+	@printf "\033[32mservices-stop\033[0m       stop the running Docker services\n"
+	@printf "\033[32mtests\033[0m               run unit tests with pytest\n"
+	@printf "\033[32mtests-in-container\033[0m  run unit tests with pytest inside Flowcept's container\n"
+	@printf "\033[32mtests-all\033[0m           run all unit tests with pytest, including very long-running ones\n"
+	@printf "\033[32mtests-notebooks\033[0m     tests the notebooks, using pytest\n"
+	@printf "\033[32mclean\033[0m               remove cache directories and Sphinx build output\n"
+	@printf "\033[32mdocs\033[0m                build HTML documentation using Sphinx\n"
+	@printf "\033[32mchecks\033[0m              run ruff linter and formatter checks\n"
+	@printf "\033[32mreformat\033[0m            run ruff linter and formatter\n"

 # Run linter and formatter checks using ruff
 checks:
@@ -25,13 +28,15 @@ reformat:
 clean:
	rm -rf .ruff_cache
	rm -rf .pytest_cache
-	rm -rf mlruns
	rm -rf mnist_data
	rm -rf tensorboard_events
	rm -f docs_dump_tasks_*
	rm -f dump_test.json
-	rm -f flowcept.log
-	rm -f mlflow.db
+	find . -type f -name "*.log" -exec rm -f {} \;
+	find . -type f -name "*.pth" -exec rm -f {} \;
+	find . -type f -name "mlflow.db" -exec rm -f {} \;
+	find . -type d -name "mlruns" -exec rm -rf {} \;
+	find . -type d -name "__pycache__" -exec rm -rf {} \; 2>/dev/null
	sphinx-build -M clean docs docs/_build

 # Build the HTML documentation using Sphinx
@@ -47,6 +52,20 @@ services:
 services-stop:
	docker compose --file deployment/compose.yml down --volumes

+# Build a new Docker image for Flowcept
+build:
+	bash deployment/build-image.sh
+
+run:
+	docker run --rm -v $(shell pwd):/flowcept -e KVDB_HOST=flowcept_redis -e MQ_HOST=flowcept_redis -e MONGO_HOST=flowcept_mongo --network flowcept_default -it flowcept
+
+tests-in-container:
+	docker run --rm -v $(shell pwd):/flowcept -e KVDB_HOST=flowcept_redis -e MQ_HOST=flowcept_redis -e MONGO_HOST=flowcept_mongo --network flowcept_default flowcept /opt/conda/envs/flowcept/bin/pytest --ignore=tests/decorator_tests/ml_tests
+
+# This command can be removed once we have our CLI
+liveness:
+	python -c 'from flowcept import Flowcept; print(Flowcept.services_alive())'
+
 # Run unit tests using pytest
 .PHONY: tests
 tests:
@@ -59,3 +78,4 @@ tests-notebooks:
 .PHONY: tests-all
 tests-all:
	pytest
+
diff --git a/deployment/Dockerfile b/deployment/Dockerfile
new file mode 100644
index 00000000..4dcbe32b
--- /dev/null
+++ b/deployment/Dockerfile
@@ -0,0 +1,27 @@
+# Use the command `make build` to build this image.
+FROM miniconda:local
+
+# Install vim
+RUN apt-get update && \
+    apt-get install -y vim curl wget make \
+    && rm -rf /var/lib/apt/lists/*
+
+WORKDIR /flowcept
+
+COPY pyproject.toml Makefile README.md ./
+COPY src ./src
+COPY resources ./resources
+COPY notebooks ./notebooks
+COPY tests ./tests
+COPY examples ./examples
+
+RUN export FLOWCEPT_SETTINGS_PATH=$(realpath resources/sample_settings.yaml) \
+    && echo "export FLOWCEPT_SETTINGS_PATH=$FLOWCEPT_SETTINGS_PATH" >> ~/.bashrc
+
+RUN conda create -n flowcept python=3.11.10 -y \
+    && echo "conda activate flowcept" >> ~/.bashrc
+
+RUN conda run -n flowcept pip install -e .[all]  # This is an overkill and will install many things you might not need. Please modify deployment/Dockerfile in case you do not need to install "all" dependencies.
+
+# Default command
+CMD ["bash"]
diff --git a/deployment/build-image.sh b/deployment/build-image.sh
new file mode 100644
index 00000000..739ba143
--- /dev/null
+++ b/deployment/build-image.sh
@@ -0,0 +1,37 @@
+#!/bin/bash
+
+if [ ! -d "src" ]; then
+    echo "Error: 'src' directory does not exist in the current path. Please run it from the project root."
+    exit 1
+fi
+
+# Download the Miniconda Dockerfile
+echo "Downloading Miniconda Dockerfile..."
+curl --silent -o Dockerfile_miniconda https://raw.githubusercontent.com/anaconda/docker-images/refs/heads/main/miniconda3/debian/Dockerfile
+cat Dockerfile_miniconda
+
+# Build the Miniconda image locally
+echo "Building miniconda:local image..."
+docker build -t miniconda:local -f Dockerfile_miniconda .
+rm Dockerfile_miniconda
+
+# Check if the Miniconda build failed
+if [ $? -ne 0 ]; then
+    echo "Error: Miniconda image build failed."
+    exit 1
+fi
+
+echo "Miniconda image built successfully."
+# Step 4: Build the flowcept image with both 'latest' and versioned tags
+echo "Building flowcept image with latest and version tags..."
+docker build -t flowcept:latest -f deployment/Dockerfile .
+
+# Check if the flowcept build succeeded
+if [ $? -eq 0 ]; then
+    echo "Flowcept image built successfully with tags 'latest'."
+    echo "You can now run it using $> make run"
+else
+    echo "Failed to build flowcept image."
+    exit 1
+fi
+
diff --git a/deployment/compose-full.yml b/deployment/compose-full.yml
deleted file mode 100644
index 69ab44ba..00000000
--- a/deployment/compose-full.yml
+++ /dev/null
@@ -1,31 +0,0 @@
-name: flowcept
-services:
-  flowcept_redis:
-    container_name: flowcept_redis
-    image: redis
-    ports:
-      - 6379:6379
-
-  flowcept_mongo:
-    container_name: flowcept_mongo
-    image: mongo:latest
-    # volumes:
-    #   - /Users/rsr/Downloads/mongo_data/db:/data/db
-    ports:
-      - 27017:27017
-
-
-# # This is just for the cases where one does not want to use the same Redis instance for caching and messaging, but
-# # it's not required to have separate instances.
-# #  local_interceptor_cache:
-# #    container_name: local_interceptor_cache
-# #    image: redis
-# #    ports:
-# #      - 60379:6379
-
-  zambeze_rabbitmq:
-    container_name: zambeze_rabbitmq
-    image: rabbitmq:3.11-management
-    ports:
-      - 5672:5672
-      - 15672:15672
diff --git a/deployment/compose.yml b/deployment/compose.yml
index f3347c4f..712de8f7 100644
--- a/deployment/compose.yml
+++ b/deployment/compose.yml
@@ -9,18 +9,18 @@ services:
   flowcept_mongo:
     container_name: flowcept_mongo
     image: mongo:latest
-    # volumes:
-    #   - /Users/rsr/Downloads/mongo_data/db:/data/db
     ports:
       - 27017:27017

+networks:
+  flowcept:
+    driver: bridge

-
-# # This is just for the cases where one does not want to use the same Redis instance for caching and messaging, but
-# # it's not required to have separate instances.
-# #  local_interceptor_cache:
-# #    container_name: local_interceptor_cache
-# #    image: redis
-# #    ports:
-# #      - 60379:6379
+# This is just for the cases where one does not want to use the same Redis instance for caching and messaging, but
+# it's not required to have separate instances.
+#  local_interceptor_cache:
+#    container_name: local_interceptor_cache
+#    image: redis
+#    ports:
+#      - 60379:6379
diff --git a/pyproject.toml b/pyproject.toml
index 9a5aeb08..c0cfd014 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -88,7 +88,6 @@ all = [
     "flowcept[responsibleai]",
     "flowcept[tensorboard]",
     "flowcept[dev]",
-    "flowcept[ml_dev]"
 ]

 [tool.hatch.version]
diff --git a/resources/sample_settings.yaml b/resources/sample_settings.yaml
index 9c64ab46..9b7d599e 100644
--- a/resources/sample_settings.yaml
+++ b/resources/sample_settings.yaml
@@ -91,8 +91,6 @@ adapters:
     file_path: mlflow.db
     log_params: ['*']
     log_metrics: ['*']
-    redis_host: localhost
-    redis_port: 6379
     watch_interval_sec: 2

   tensorboard:
@@ -100,14 +98,10 @@ adapters:
     file_path: tensorboard_events
     log_tags: ['scalars', 'hparams', 'tensors']
     log_metrics: ['accuracy']
-    redis_host: localhost
-    redis_port: 6379
     watch_interval_sec: 5

   dask:
     kind: dask
-    redis_host: localhost
-    redis_port: 6379
     worker_should_get_input: true
     scheduler_should_get_input: true
     worker_should_get_output: true
diff --git a/src/flowcept/configs.py b/src/flowcept/configs.py
index d3f29c8c..c84c5f33 100644
--- a/src/flowcept/configs.py
+++ b/src/flowcept/configs.py
@@ -17,11 +17,10 @@
 if not os.path.exists(SETTINGS_PATH):
     SETTINGS_PATH = None

-    import importlib.resources
+    from importlib import resources

-    with importlib.resources.open_text("resources", "sample_settings.yaml") as f:
+    with resources.files("resources").joinpath("sample_settings.yaml").open("r") as f:
         settings = OmegaConf.load(f)
-
 else:
     settings = OmegaConf.load(SETTINGS_PATH)

@@ -82,9 +81,9 @@
 # MongoDB Settings #
 ######################

-MONGO_URI = settings["mongodb"].get("uri", os.environ.get("MONGO_URI", None))
-MONGO_HOST = settings["mongodb"].get("host", os.environ.get("MONGO_HOST", "localhost"))
-MONGO_PORT = int(settings["mongodb"].get("port", os.environ.get("MONGO_PORT", "27017")))
+MONGO_URI = os.environ.get("MONGO_URI", settings["mongodb"].get("uri", None))
+MONGO_HOST = os.environ.get("MONGO_HOST", settings["mongodb"].get("host", "localhost"))
+MONGO_PORT = int(os.environ.get("MONGO_PORT", settings["mongodb"].get("port", 27017)))
 MONGO_DB = settings["mongodb"].get("db", PROJECT_NAME)
 MONGO_CREATE_INDEX = settings["mongodb"].get("create_collection_index", True)

diff --git a/src/flowcept/flowceptor/adapters/dask/dask_dataclasses.py b/src/flowcept/flowceptor/adapters/dask/dask_dataclasses.py
index fe8181ad..fdcea8e0 100644
--- a/src/flowcept/flowceptor/adapters/dask/dask_dataclasses.py
+++ b/src/flowcept/flowceptor/adapters/dask/dask_dataclasses.py
@@ -11,8 +11,6 @@
 class DaskSettings(BaseSettings):
     """Dask settings."""

-    redis_port: int
-    redis_host: str
     worker_should_get_input: bool
     worker_should_get_output: bool
     scheduler_should_get_input: bool
diff --git a/src/flowcept/flowceptor/adapters/dask/dask_plugins.py b/src/flowcept/flowceptor/adapters/dask/dask_plugins.py
index de7fcd96..7e2e725d 100644
--- a/src/flowcept/flowceptor/adapters/dask/dask_plugins.py
+++ b/src/flowcept/flowceptor/adapters/dask/dask_plugins.py
@@ -66,7 +66,7 @@ def transition(self, key, start, finish, *args, **kwargs):
         """Get the transition."""
         self.interceptor.callback(key, start, finish, args, kwargs)

-    def close(self):
+    async def close(self):
         """Close it."""
         self.interceptor.logger.debug("Going to close scheduler!")
         self.interceptor.stop()
diff --git a/src/flowcept/flowceptor/adapters/interceptor_state_manager.py b/src/flowcept/flowceptor/adapters/interceptor_state_manager.py
index 64e5f040..0137b81d 100644
--- a/src/flowcept/flowceptor/adapters/interceptor_state_manager.py
+++ b/src/flowcept/flowceptor/adapters/interceptor_state_manager.py
@@ -5,6 +5,7 @@
 from flowcept.commons.flowcept_dataclasses.base_settings_dataclasses import (
     BaseSettings,
 )
+from flowcept.configs import KVDB_HOST, KVDB_PORT


 class InterceptorStateManager(object):
@@ -12,16 +13,9 @@ class InterceptorStateManager(object):

     def __init__(self, settings: BaseSettings):
         self._set_name = settings.key
-
-        if not hasattr(settings, "redis_host"):
-            raise Exception(
-                f"This adapter setting {settings.key} manages state in Redis, so"
-                f"Redis Host is required in the settings yaml file."
-            )
-
         self._db = Redis(
-            host=settings.redis_host,
-            port=settings.redis_port,
+            host=KVDB_HOST,
+            port=KVDB_PORT,
             db=0,
         )

diff --git a/src/flowcept/flowceptor/adapters/mlflow/mlflow_dataclasses.py b/src/flowcept/flowceptor/adapters/mlflow/mlflow_dataclasses.py
index 22392e95..5603eb6d 100644
--- a/src/flowcept/flowceptor/adapters/mlflow/mlflow_dataclasses.py
+++ b/src/flowcept/flowceptor/adapters/mlflow/mlflow_dataclasses.py
@@ -16,8 +16,6 @@ class MLFlowSettings(BaseSettings):
     log_params: List[str]
     log_metrics: List[str]
     watch_interval_sec: int
-    redis_port: int
-    redis_host: str
     kind = "mlflow"

     def __post_init__(self):
diff --git a/src/flowcept/flowceptor/adapters/tensorboard/tensorboard_dataclasses.py b/src/flowcept/flowceptor/adapters/tensorboard/tensorboard_dataclasses.py
index 6275289f..8b44f3a2 100644
--- a/src/flowcept/flowceptor/adapters/tensorboard/tensorboard_dataclasses.py
+++ b/src/flowcept/flowceptor/adapters/tensorboard/tensorboard_dataclasses.py
@@ -16,8 +16,6 @@ class TensorboardSettings(BaseSettings):
     log_tags: List[str]
     log_metrics: List[str]
     watch_interval_sec: int
-    redis_port: int
-    redis_host: str
     kind = "tensorboard"

     def __post_init__(self):
diff --git a/tests/adapters/test_dask.py b/tests/adapters/test_dask.py
index 30624486..d82dad5a 100644
--- a/tests/adapters/test_dask.py
+++ b/tests/adapters/test_dask.py
@@ -79,7 +79,6 @@ def test_dummyfunc(self):
         i1 = np.random.random()
         o1 = self.client.submit(dummy_func1, i1)
         # self.logger.debug(o1.result())
-        return o1.key

     def test_long_workflow(self):
         i1 = np.random.random()
@@ -88,7 +87,6 @@ def test_long_workflow(self):
         o2 = TestDask.client.submit(dummy_func2, o1)
         o3 = TestDask.client.submit(dummy_func3, o1, o2)
         self.logger.debug(o3.result())
-        return o3.key

     def varying_args(self):
         i1 = np.random.random()
@@ -108,7 +106,6 @@ def test_map_workflow(self):
             assert result > 0
             self.logger.debug(f"{o.key}, {result}")
         sleep(3)
-        return o1

     def test_evaluate_submit(self):
         wf_id = register_dask_workflow(self.client)
@@ -136,7 +133,6 @@ def test_evaluate_submit(self):
             condition_to_evaluate=lambda docs: "phenome" in docs[0]["used"]
             and len(docs[0]["generated"]) > 0,
         )
-        return o1

     def test_map_workflow_kwargs(self):
         i1 = [
@@ -151,7 +147,6 @@ def test_map_workflow_kwargs(self):
             result = o.result()
             assert result["z"] > 0
             self.logger.debug(o.key, result)
-        return o1

     def error_task_submission(self):
         i1 = np.random.random()
diff --git a/tests/adapters/test_mlflow.py b/tests/adapters/test_mlflow.py
index fe367731..41660878 100644
--- a/tests/adapters/test_mlflow.py
+++ b/tests/adapters/test_mlflow.py
@@ -33,7 +33,10 @@ def setUpClass(cls):
         mlflow.delete_experiment(mlflow.create_experiment("starter"))
         sleep(1)

-    def test_pure_run_mlflow(self, epochs=10, batch_size=64):
+    def test_simple_mlflow_run(self):
+        self.simple_mlflow_run()
+
+    def simple_mlflow_run(self, epochs=10, batch_size=64):
         experiment_name = "LinearRegression"
         experiment_id = mlflow.create_experiment(experiment_name + str(uuid.uuid4()))
         with mlflow.start_run(experiment_id=experiment_id) as run:
@@ -55,14 +58,14 @@ def test_get_runs(self):
             self.logger.debug(run[0])

     def test_get_run_data(self):
-        run_uuid = self.test_pure_run_mlflow()
+        run_uuid = self.simple_mlflow_run()
         run_data = TestMLFlow.interceptor.dao.get_run_data(run_uuid)
         assert run_data.task_id == run_uuid

     def test_check_state_manager(self):
         TestMLFlow.interceptor.state_manager.reset()
         TestMLFlow.interceptor.state_manager.add_element_id("dummy-value")
-        self.test_pure_run_mlflow()
+        self.simple_mlflow_run()
         runs = TestMLFlow.interceptor.dao.get_finished_run_uuids()
         assert len(runs) > 0
         for run_tuple in runs:
@@ -75,7 +78,7 @@ def test_check_state_manager(self):
     def test_observer_and_consumption(self):
         assert TestMLFlow.interceptor is not None
         with Flowcept(TestMLFlow.interceptor):
-            run_uuid = self.test_pure_run_mlflow()
+            run_uuid = self.simple_mlflow_run()
         sleep(5)
         print(run_uuid)
         assert evaluate_until(
@@ -91,7 +94,7 @@ def test_multiple_tasks(self):
         run_ids = []
         with Flowcept(self.interceptor):
             for i in range(1, 10):
-                run_ids.append(self.test_pure_run_mlflow(epochs=i * 10, batch_size=i * 2))
+                run_ids.append(self.simple_mlflow_run(epochs=i * 10, batch_size=i * 2))
             sleep(3)

         for run_id in run_ids:
diff --git a/tests/decorator_tests/flowcept_task_decorator_test.py b/tests/decorator_tests/flowcept_task_decorator_test.py
index c5e57055..9d375326 100644
--- a/tests/decorator_tests/flowcept_task_decorator_test.py
+++ b/tests/decorator_tests/flowcept_task_decorator_test.py
@@ -135,6 +135,42 @@ def print_system_stats():
     )


+def simple_decorated_function(
+    max_tasks=10, start_doc_inserter=True, check_insertions=True
+):
+    workflow_id = str(uuid.uuid4())
+    print(workflow_id)
+    # TODO :refactor-base-interceptor:
+    consumer = Flowcept(start_doc_inserter=start_doc_inserter)
+    consumer.start()
+    t0 = time()
+    for i in range(max_tasks):
+        decorated_all_serializable(x=i, workflow_id=workflow_id)
+    t1 = time()
+    print("Decorated:")
+    print_system_stats()
+    consumer.stop()
+    decorated = t1 - t0
+    print(workflow_id)
+
+    if check_insertions:
+        assert assert_by_querying_tasks_until(
+            filter={"workflow_id": workflow_id},
+            condition_to_evaluate=lambda docs: len(docs) == max_tasks,
+            max_time=60,
+            max_trials=60,
+        )
+
+    t0 = time()
+    for i in range(max_tasks):
+        not_decorated_func(x=i, workflow_id=workflow_id)
+    t1 = time()
+    print("Not Decorated:")
+    print_system_stats()
+    not_decorated = t1 - t0
+    return decorated, not_decorated
+
+
 class DecoratorTests(unittest.TestCase):
     @lightweight_flowcept_task
     def decorated_function_with_self(self, x, workflow_id=None):
@@ -160,41 +196,6 @@ def test_decorated_function(self):
             max_trials=60,
         )

-    def test_decorated_function_simple(
-        self, max_tasks=10, start_doc_inserter=True, check_insertions=True
-    ):
-        workflow_id = str(uuid.uuid4())
-        print(workflow_id)
-        # TODO :refactor-base-interceptor:
-        consumer = Flowcept(start_doc_inserter=start_doc_inserter)
-        consumer.start()
-        t0 = time()
-        for i in range(max_tasks):
-            decorated_all_serializable(x=i, workflow_id=workflow_id)
-        t1 = time()
-        print("Decorated:")
-        print_system_stats()
-        consumer.stop()
-        decorated = t1 - t0
-        print(workflow_id)
-
-        if check_insertions:
-            assert assert_by_querying_tasks_until(
-                filter={"workflow_id": workflow_id},
-                condition_to_evaluate=lambda docs: len(docs) == max_tasks,
-                max_time=60,
-                max_trials=60,
-            )
-
-        t0 = time()
-        for i in range(max_tasks):
-            not_decorated_func(x=i, workflow_id=workflow_id)
-        t1 = time()
-        print("Not Decorated:")
-        print_system_stats()
-        not_decorated = t1 - t0
-        return decorated, not_decorated
-
     def test_online_offline(self):
         flowcept.configs.DB_FLUSH_MODE = "offline"
         # flowcept.instrumentation.decorators.instrumentation_interceptor = (
@@ -214,7 +215,7 @@ def test_decorated_function_timed(self):
         times = []
         for i in range(10):
             times.append(
-                self.test_decorated_function_simple(
+                simple_decorated_function(
                     max_tasks=10,  # 100000,
                     check_insertions=False,
                     start_doc_inserter=False,