diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml
new file mode 100644
index 0000000..5d1ab1a
--- /dev/null
+++ b/.github/workflows/docs.yml
@@ -0,0 +1,40 @@
+name: Documentation building and deployment
+
+on:
+ release:
+ types: [published]
+ push:
+ branches:
+ - master
+
+jobs:
+ docs:
+ permissions:
+ contents: write
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+
+ - uses: actions/setup-python@v5
+ with:
+ python-version: "3.10"
+
+ - uses: pdm-project/setup-pdm@v4
+ name: Set up PDM
+ with:
+ python-version: "3.10"
+ allow-python-prereleases: false
+ cache: true
+ cache-dependency-path: |
+ ./pdm.lock
+
+ - name: Install dependencies
+ run: pdm install --group docs --no-default
+
+ - name: Build docs
+ run: pdm run docs-build-gh
+
+ - name: Deploy
+ uses: JamesIves/github-pages-deploy-action@v4
+ with:
+ folder: docs/_build
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..3a46e4d
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,173 @@
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+cover/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+.pybuilder/
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+# For a library or package, you might want to ignore these files since the code is
+# intended to run in multiple environments; otherwise, check them in:
+# .python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# poetry
+# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
+# This is especially recommended for binary packages to ensure reproducibility, and is more
+# commonly ignored for libraries.
+# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
+#poetry.lock
+
+# pdm
+# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
+#pdm.lock
+# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
+# in version control.
+# https://pdm.fming.dev/#use-with-ide
+.pdm.toml
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+docker/.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# pytype static type analyzer
+.pytype/
+
+# Cython debug symbols
+cython_debug/
+
+# PyCharm
+# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
+# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
+# and can be added to the global gitignore or merged into this file. For a more nuclear
+# option (not recommended) you can uncomment the following to ignore the entire idea folder.
+#.idea/
+
+*.drawio.bkp
+*.drawio.dtmp
+*.pdf
+
+.idea/
+.vscode/
+
+# EOS config file
+/config.yml
+
+/user/*
+!/user/.gitkeep
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..bf69be5
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,11 @@
+Copyright 2024 The University of North Carolina at Chapel Hill
+
+Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
+
+1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
+
+3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
\ No newline at end of file
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..3ae7736
--- /dev/null
+++ b/README.md
@@ -0,0 +1,123 @@
+# The Experiment Orchestration System (EOS)
+
+![os](https://img.shields.io/badge/OS-win%7Cmac%7Clinux-9cf)
+![python](https://img.shields.io/badge/Python-3.10+-darkgreen)
+
+The Experiment Orchestration System (EOS) is a comprehensive software framework and runtime for laboratory automation, designed
+to serve as the foundation for one or more automated or self-driving labs (SDLs).
+
+EOS provides:
+
+* A common framework to implement laboratory automation
+* A plugin system for defining labs, devices, experiments, tasks, and optimizers
+* A package system for sharing and reusing code and resources across the community
+* Extensive static and dynamic validation of experiments, task parameters, and more
+* A runtime for executing tasks, experiments, and experiment campaigns
+* A central authoritative orchestrator that can communicate with and control multiple devices
+* Distributed task execution and optimization using the Ray framework
+* Built-in Bayesian experiment parameter optimization
+* Optimized task scheduling
+* Device and sample container allocation system to prevent conflicts
+* Result aggregation, such as automatic output file storage
+
+## Installation
+
+### 1. Install PDM
+
+EOS uses PDM as its project manager, making it easier to install dependencies and build the project.
+
+#### Linux/Mac
+
+```shell
+curl -sSL https://pdm-project.org/install-pdm.py | python3 -
+```
+
+#### Windows
+
+```shell
+(Invoke-WebRequest -Uri https://pdm-project.org/install-pdm.py -UseBasicParsing).Content | py -
+```
+
+### 2. Clone the EOS Repository
+
+```shell
+git clone https://github.com/aangelos28/eos
+```
+
+### 3. Install Dependencies
+
+Navigate to the cloned repository and run:
+
+```shell
+pdm install
+```
+
+(Optional) If you wish to contribute to EOS development:
+
+```shell
+pdm install -G dev
+```
+
+(Optional) If you also wish to contribute to the EOS documentation:
+
+```shell
+pdm install -G docs
+```
+
+## Configuration
+
+After installation, you need to configure external services such as MongoDB and MinIO, as well as EOS itself.
+
+### 1. Configure External Services
+
+We provide a Docker Compose file that can run all external services for you.
+
+Copy the example environment file:
+
+```shell
+cp docker/.env.example docker/.env
+```
+
+Edit `docker/.env` and provide values for all fields.
+
+### 2. Configure EOS
+
+EOS reads parameters from a YAML configuration file.
+
+Copy the example configuration file:
+
+```shell
+cp config.example.yml config.yml
+```
+
+Edit `config.yml`. Ensure that credentials are provided for the MongoDB and MinIO services.
+
+## Running
+
+### 1. Start External Services
+
+```shell
+cd docker
+docker compose up -d
+```
+
+### 2. Source the Virtual Environment
+
+```shell
+source .venv/bin/activate
+```
+
+### 3. Start the EOS Orchestrator
+
+```shell
+eos orchestrator
+```
+
+### 4. Start the EOS REST API
+
+```shell
+eos api
+```
diff --git a/config.example.yml b/config.example.yml
new file mode 100644
index 0000000..108d0c4
--- /dev/null
+++ b/config.example.yml
@@ -0,0 +1,27 @@
+user_dir: ./user
+labs:
+ - lab1
+ - lab2
+experiments:
+ - experiment1
+ - experiment2
+log_level: INFO
+
+# EOS orchestrator's internal web API server configuration
+web_api:
+ host: localhost
+ port: 8070
+
+# EOS database configuration
+db:
+ host: localhost
+ port: 27017
+ username: ""
+ password: ""
+
+# EOS file database configuration
+file_db:
+ host: localhost
+ port: 9004
+ username: ""
+ password: ""
diff --git a/docker/.env.example b/docker/.env.example
new file mode 100644
index 0000000..3d6b778
--- /dev/null
+++ b/docker/.env.example
@@ -0,0 +1,28 @@
+# EOS #####################################
+COMPOSE_PROJECT_NAME=eos
+
+# MongoDB root username
+EOS_MONGO_INITDB_ROOT_USERNAME=
+
+# MongoDB root user password
+EOS_MONGO_INITDB_ROOT_PASSWORD=
+
+# MinIO root username
+EOS_MINIO_ROOT_USER=
+
+# MinIO root user password
+EOS_MINIO_ROOT_PASSWORD=
+
+# Budibase ################################
+# The values below can be set to random strings
+BB_JWT_SECRET=
+BB_MINIO_ACCESS_KEY=
+BB_MINIO_SECRET_KEY=
+BB_REDIS_PASSWORD=
+BB_COUCHDB_USER=
+BB_COUCHDB_PASSWORD=
+BB_INTERNAL_API_KEY=
+
+# Admin user credentials to log in to Budibase
+BB_ADMIN_USER_EMAIL=
+BB_ADMIN_USER_PASSWORD=
diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml
new file mode 100644
index 0000000..11ad4bd
--- /dev/null
+++ b/docker/docker-compose.yml
@@ -0,0 +1,66 @@
+services:
+ eos-mongodb:
+ image: mongo:jammy
+ container_name: eos-mongodb
+ restart: unless-stopped
+ environment:
+ MONGO_INITDB_ROOT_USERNAME: ${EOS_MONGO_INITDB_ROOT_USERNAME}
+ MONGO_INITDB_ROOT_PASSWORD: ${EOS_MONGO_INITDB_ROOT_PASSWORD}
+ ports:
+ - "27017:27017"
+ networks:
+ - eos_network
+ volumes:
+ - mongodb_data:/data/db
+
+ eos-minio:
+ image: minio/minio:latest
+ container_name: eos-minio
+ restart: unless-stopped
+ environment:
+ MINIO_ROOT_USER: ${EOS_MINIO_ROOT_USER}
+ MINIO_ROOT_PASSWORD: ${EOS_MINIO_ROOT_PASSWORD}
+ ports:
+ - "9004:9000"
+ - "9005:9001"
+ networks:
+ - eos_network
+ volumes:
+ - minio_data:/data
+ command: server --console-address ":9001" /data
+
+ eos-budibase:
+ image: budibase/budibase:latest
+ container_name: eos-budibase
+ restart: unless-stopped
+ ports:
+ - "8080:80"
+ environment:
+ JWT_SECRET: ${BB_JWT_SECRET}
+ MINIO_ACCESS_KEY: ${BB_MINIO_ACCESS_KEY}
+ MINIO_SECRET_KEY: ${BB_MINIO_SECRET_KEY}
+ REDIS_PASSWORD: ${BB_REDIS_PASSWORD}
+ COUCHDB_USER: ${BB_COUCHDB_USER}
+ COUCHDB_PASSWORD: ${BB_COUCHDB_PASSWORD}
+ INTERNAL_API_KEY: ${BB_INTERNAL_API_KEY}
+ BB_ADMIN_USER_EMAIL: ${BB_ADMIN_USER_EMAIL}
+ BB_ADMIN_USER_PASSWORD: ${BB_ADMIN_USER_PASSWORD}
+ networks:
+ - eos_network
+ extra_hosts:
+ - "host.docker.internal:host-gateway"
+ volumes:
+ - budibase_data:/data
+
+networks:
+ eos_network:
+ name: eos_network
+ driver: bridge
+
+volumes:
+ mongodb_data:
+ driver: local
+ minio_data:
+ driver: local
+ budibase_data:
+ driver: local
diff --git a/docs/Makefile b/docs/Makefile
new file mode 100644
index 0000000..d4bb2cb
--- /dev/null
+++ b/docs/Makefile
@@ -0,0 +1,20 @@
+# Minimal makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line, and also
+# from the environment for the first two.
+SPHINXOPTS ?=
+SPHINXBUILD ?= sphinx-build
+SOURCEDIR = .
+BUILDDIR = _build
+
+# Put it first so that "make" without argument is like "make help".
+help:
+ @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
+
+.PHONY: help Makefile
+
+# Catch-all target: route all unknown targets to Sphinx using the new
+# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
+%: Makefile
+ @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
diff --git a/docs/_static/custom.css b/docs/_static/custom.css
new file mode 100644
index 0000000..e3d63a5
--- /dev/null
+++ b/docs/_static/custom.css
@@ -0,0 +1,3 @@
+.bd-page-width {
+ max-width: 100rem; /* default is 88rem */
+}
\ No newline at end of file
diff --git a/docs/_static/img/dmta-loop.png b/docs/_static/img/dmta-loop.png
new file mode 100644
index 0000000..9874f0a
Binary files /dev/null and b/docs/_static/img/dmta-loop.png differ
diff --git a/docs/_static/img/eos-computers.png b/docs/_static/img/eos-computers.png
new file mode 100644
index 0000000..52ca095
Binary files /dev/null and b/docs/_static/img/eos-computers.png differ
diff --git a/docs/_static/img/eos-logo.png b/docs/_static/img/eos-logo.png
new file mode 100644
index 0000000..34f45c1
Binary files /dev/null and b/docs/_static/img/eos-logo.png differ
diff --git a/docs/_static/img/example-package-tree.png b/docs/_static/img/example-package-tree.png
new file mode 100644
index 0000000..fdb1e3c
Binary files /dev/null and b/docs/_static/img/example-package-tree.png differ
diff --git a/docs/_static/img/experiment-graph.png b/docs/_static/img/experiment-graph.png
new file mode 100644
index 0000000..82b1401
Binary files /dev/null and b/docs/_static/img/experiment-graph.png differ
diff --git a/docs/_static/img/laboratory.png b/docs/_static/img/laboratory.png
new file mode 100644
index 0000000..aec03de
Binary files /dev/null and b/docs/_static/img/laboratory.png differ
diff --git a/docs/_static/img/optimize-experiment-loop.png b/docs/_static/img/optimize-experiment-loop.png
new file mode 100644
index 0000000..b343b1c
Binary files /dev/null and b/docs/_static/img/optimize-experiment-loop.png differ
diff --git a/docs/_static/img/package.png b/docs/_static/img/package.png
new file mode 100644
index 0000000..9442ea0
Binary files /dev/null and b/docs/_static/img/package.png differ
diff --git a/docs/_static/img/task-inputs-outputs.png b/docs/_static/img/task-inputs-outputs.png
new file mode 100644
index 0000000..a6c42b4
Binary files /dev/null and b/docs/_static/img/task-inputs-outputs.png differ
diff --git a/docs/_static/img/tasks-devices.png b/docs/_static/img/tasks-devices.png
new file mode 100644
index 0000000..8c04596
Binary files /dev/null and b/docs/_static/img/tasks-devices.png differ
diff --git a/docs/conf.py b/docs/conf.py
new file mode 100644
index 0000000..bb7a298
--- /dev/null
+++ b/docs/conf.py
@@ -0,0 +1,50 @@
+# Configuration file for the Sphinx documentation builder.
+#
+# For the full list of built-in configuration values, see the documentation:
+# https://www.sphinx-doc.org/en/master/usage/configuration.html
+
+# -- Project information -----------------------------------------------------
+# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information
+
+project = "eos"
+copyright = "2024, UNC Robotics"
+author = "Angelos Angelopoulos"
+release = "0.3.0"
+
+extensions = [
+ "sphinx.ext.autosectionlabel",
+ "sphinx.ext.autodoc",
+ "sphinx.ext.napoleon",
+ "sphinx_design",
+ "sphinx_copybutton",
+ "sphinx_click",
+]
+
+exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
+
+templates_path = ["_templates"]
+
+# -- Options for HTML output -------------------------------------------------
+# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output
+
+html_theme = "pydata_sphinx_theme"
+html_title = "EOS - The Experiment Orchestration System"
+html_static_path = ["_static"]
+html_css_files = [
+ "custom.css",
+]
+
+html_show_sourcelink = False
+
+html_theme_options = {
+ "logo": {
+ "text": "The Experiment Orchestration System",
+ "image_light": "_static/img/eos-logo.png",
+ "image_dark": "_static/img/eos-logo.png",
+ },
+ "navigation_with_keys": True,
+ "navbar_align": "left",
+ "show_toc_level": 1,
+}
+
+html_context = {"default_mode": "light"}
diff --git a/docs/index.rst b/docs/index.rst
new file mode 100644
index 0000000..dce7765
--- /dev/null
+++ b/docs/index.rst
@@ -0,0 +1,25 @@
+The Experiment Orchestration System (EOS)
+=========================================
+
+The Experiment Orchestration System (EOS) is a comprehensive software framework and runtime for laboratory automation, designed
+to serve as the foundation for one or more automated or self-driving labs (SDLs).
+
+EOS provides:
+
+* A common framework to implement laboratory automation
+* A plugin system for defining labs, devices, experiments, tasks, and optimizers
+* A package system for sharing and reusing code and resources across the community
+* Extensive static and dynamic validation of experiments, task parameters, and more
+* A runtime for executing tasks, experiments, and experiment campaigns
+* A central authoritative orchestrator that can communicate with and control multiple devices
+* Distributed task execution and optimization using the Ray framework
+* Built-in Bayesian experiment parameter optimization
+* Optimized task scheduling
+* Device and sample container allocation system to prevent conflicts
+* Result aggregation, such as automatic output file storage
+
+.. toctree::
+ :caption: User Guide
+ :maxdepth: 2
+
+ user-guide/index
diff --git a/docs/make.bat b/docs/make.bat
new file mode 100644
index 0000000..954237b
--- /dev/null
+++ b/docs/make.bat
@@ -0,0 +1,35 @@
+@ECHO OFF
+
+pushd %~dp0
+
+REM Command file for Sphinx documentation
+
+if "%SPHINXBUILD%" == "" (
+ set SPHINXBUILD=sphinx-build
+)
+set SOURCEDIR=.
+set BUILDDIR=_build
+
+%SPHINXBUILD% >NUL 2>NUL
+if errorlevel 9009 (
+ echo.
+ echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
+ echo.installed, then set the SPHINXBUILD environment variable to point
+ echo.to the full path of the 'sphinx-build' executable. Alternatively you
+ echo.may add the Sphinx directory to PATH.
+ echo.
+ echo.If you don't have Sphinx installed, grab it from
+ echo.https://www.sphinx-doc.org/
+ exit /b 1
+)
+
+if "%1" == "" goto help
+
+%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
+goto end
+
+:help
+%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
+
+:end
+popd
diff --git a/docs/user-guide/campaigns.rst b/docs/user-guide/campaigns.rst
new file mode 100644
index 0000000..9eb748a
--- /dev/null
+++ b/docs/user-guide/campaigns.rst
@@ -0,0 +1,126 @@
+Campaigns
+=========
+A campaign in EOS is an experiment that is executed multiple times in sequence, usually with different parameters
+on each run. A campaign pursues one or more goals, such as optimizing objectives by searching for optimal parameters.
+Campaigns are the highest-level execution unit in EOS, and can be used to implement autonomous (self-driving) labs.
+
+The design-make-test-analyze (DMTA) loop is a common paradigm in autonomous experimentation, and EOS campaigns can be
+used to implement it. EOS has built-in support for running campaigns of an experiment, as well as a built-in Bayesian
+optimizer that can be used to optimize parameters.
+
+.. figure:: ../_static/img/dmta-loop.png
+ :alt: The DMTA Loop
+ :align: center
+
+Optimization Setup (Analyze and Design Phases)
+----------------------------------------------
+Both the "analyze" and "design" phases of the DMTA loop can be automated by optimizing the parameters of experiments over time.
+This is natively supported by EOS through a built-in Bayesian optimizer that integrates with the campaign execution module.
+It is also possible to plug in custom optimization algorithms, such as reinforcement learning.
+
+Let's look at the color mixing experiment to see how a campaign with optimization can be set up. There are six dynamic
+parameters, which are the inputs of the optimization problem:
+
+.. code-block:: yaml
+
+ # In the "dispense_colors" task
+ cyan_volume: eos_dynamic
+ magenta_volume: eos_dynamic
+ yellow_volume: eos_dynamic
+ black_volume: eos_dynamic
+
+ # In the "mix_colors" task
+ mixing_time: eos_dynamic
+ mixing_speed: eos_dynamic
+
+Looking at the task specification of the `score_color` task, we also see that there is an output parameter called "loss".
+
+:bdg-primary:`task.yml`
+
+.. code-block:: yaml
+
+ type: Score Color
+ description: Score a color based on how close it is to an expected color
+
+ input_parameters:
+ red:
+ type: integer
+ unit: n/a
+ description: The red component of the color
+ green:
+ type: integer
+ unit: n/a
+ description: The green component of the color
+ blue:
+ type: integer
+ unit: n/a
+ description: The blue component of the color
+
+ output_parameters:
+ loss:
+ type: decimal
+ unit: n/a
+ description: Total loss of the color compared to the expected color
+
+Taking all of this together: the experiment selects CMYK color component volumes as well as a mixing time and mixing
+speed, and tries to minimize the loss of the synthesized color relative to an expected color.
+
+This setup is also summarized in the `optimizer.py` file adjacent to `experiment.yml`.
+
+:bdg-primary:`optimizer.py`
+
+.. code-block:: python
+
+ from typing import Type, Tuple, Dict
+
+ from bofire.data_models.acquisition_functions.acquisition_function import qNEI
+ from bofire.data_models.enum import SamplingMethodEnum
+ from bofire.data_models.features.continuous import ContinuousOutput, ContinuousInput
+ from bofire.data_models.objectives.identity import MinimizeObjective
+
+ from eos.optimization.sequential_bayesian_optimizer import BayesianSequentialOptimizer
+ from eos.optimization.abstract_sequential_optimizer import AbstractSequentialOptimizer
+
+
+ def eos_create_campaign_optimizer() -> Tuple[Dict, Type[AbstractSequentialOptimizer]]:
+ constructor_args = {
+ "inputs": [
+ ContinuousInput(key="dispense_colors.cyan_volume", bounds=(0, 5)),
+ ContinuousInput(key="dispense_colors.magenta_volume", bounds=(0, 5)),
+ ContinuousInput(key="dispense_colors.yellow_volume", bounds=(0, 5)),
+ ContinuousInput(key="dispense_colors.black_volume", bounds=(0, 5)),
+ ContinuousInput(key="mix_colors.mixing_time", bounds=(1, 15)),
+ ContinuousInput(key="mix_colors.mixing_speed", bounds=(10, 500)),
+ ],
+ "outputs": [
+ ContinuousOutput(key="score_color.loss", objective=MinimizeObjective(w=1.0)),
+ ],
+ "constraints": [],
+ "acquisition_function": qNEI(),
+ "num_initial_samples": 50,
+ "initial_sampling_method": SamplingMethodEnum.SOBOL,
+ }
+
+ return constructor_args, BayesianSequentialOptimizer
+
+The `eos_create_campaign_optimizer` function is used to create the optimizer for the campaign. We can
+see that the inputs are composed of all the dynamic parameters in the experiment and the output is the "loss" output parameter
+from the "score_color" task. The objective of the optimizer (and the campaign) is to minimize this loss.
+
+More about optimizers can be found in the Optimizers section.
+
+Automation Setup (Make and Test Phases)
+---------------------------------------
+EOS manages execution of the automation; the tasks and devices must be implemented by the user. Careful setup of
+the experiment is required to ensure that a campaign can be executed autonomously.
+
+Some guidelines:
+
+* Each experiment should be standalone and should not depend on previous experiments.
+* Each experiment should leave the laboratory in a state that allows the next experiment to be executed.
+* Dependencies between tasks should be minimized. A task should have a dependency on another task only if it is necessary.
+* Tasks should depend on any devices that they may be interacting with, even if they are not operating them. For example,
+ if a robot transfer task takes a container from device A to device B, then the robot arm and both devices A and B should be required
+ devices for the task.
+* Branches and loops are not supported. If these are needed, they should be encapsulated inside large tasks that may use
+ many devices and may represent several steps in the experiment.
diff --git a/docs/user-guide/configuration.rst b/docs/user-guide/configuration.rst
new file mode 100644
index 0000000..bf23d3d
--- /dev/null
+++ b/docs/user-guide/configuration.rst
@@ -0,0 +1,28 @@
+Configuration
+=============
+
+After installation, you need to configure external services such as MongoDB and MinIO, as well as EOS itself.
+
+1. Configure External Services
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+We provide a Docker Compose file that can run all external services for you.
+
+Copy the example environment file:
+
+.. code-block:: shell
+
+ cp docker/.env.example docker/.env
+
+Edit `docker/.env` and provide values for all fields.
+
+2. Configure EOS
+^^^^^^^^^^^^^^^^
+EOS reads parameters from a YAML configuration file.
+
+Copy the example configuration file:
+
+.. code-block:: shell
+
+ cp config.example.yml config.yml
+
+Edit `config.yml`. Ensure that credentials are provided for the MongoDB and MinIO services.
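+
+For example, the `db` and `file_db` sections should mirror the credentials set in `docker/.env` (the placeholder
+values below are illustrative):
+
+.. code-block:: yaml
+
+    db:
+      host: localhost
+      port: 27017
+      username: "<EOS_MONGO_INITDB_ROOT_USERNAME>"
+      password: "<EOS_MONGO_INITDB_ROOT_PASSWORD>"
+
+    file_db:
+      host: localhost
+      port: 9004
+      username: "<EOS_MINIO_ROOT_USER>"
+      password: "<EOS_MINIO_ROOT_PASSWORD>"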
diff --git a/docs/user-guide/devices.rst b/docs/user-guide/devices.rst
new file mode 100644
index 0000000..d1e5e12
--- /dev/null
+++ b/docs/user-guide/devices.rst
@@ -0,0 +1,107 @@
+Devices
+=======
+In EOS, a device is an abstraction for a physical or virtual apparatus. A device is used by one or more tasks to
+perform operations. Each device in EOS is managed by a dedicated process, created when a laboratory definition is
+loaded. This process is usually implemented as a server from which tasks call various functions. For example, there
+could be a device called "magnetic mixer", which communicates with a physical magnetic mixer via
+serial and provides functions such as `start`, `stop`, `set_time` and `set_speed`.
+
+.. figure:: ../_static/img/tasks-devices.png
+ :alt: EOS Tasks and Devices
+ :align: center
+
+In the figure above, we illustrate an example of devices and a task that uses these devices. The task in this example is
+Gas Chromatography (GC) sampling, which is implemented with a GC and a mobile manipulation robot for automating the
+sample injection with a syringe. Both the GC and the robot are physical devices, and each has a device implementation
+in EOS, which runs as a persistent process. Then, the GC Sampling task uses both of the EOS devices to automate the
+sample injection process.
+
+Most often, an EOS device represents a physical device in the lab, but this need not be the case. A device
+in EOS can represent anything that needs persistent state throughout one or more experiments, such as
+an AI module that records the inputs given to it. Remember that a device in EOS is a persistent process.
+
+Device Implementation
+---------------------
+* Devices are implemented in the `devices` subdirectory inside an EOS package
+* Each device has its own subfolder (e.g., devices/magnetic_mixer)
+* There are two key files per device: `device.yml` and `device.py`
+
+YAML File (device.yml)
+~~~~~~~~~~~~~~~~~~~~~~
+* Specifies the device type, description, and initialization parameters
+* The same implementation can be used for multiple devices of the same type
+* Initialization parameters can be overridden in the laboratory definition (see the example below)
+
+Below is an example device YAML file for a magnetic mixer:
+
+:bdg-primary:`device.yml`
+
+.. code-block:: yaml
+
+ type: magnetic_mixer
+ description: Magnetic mixer for mixing the contents of a container
+
+ initialization_parameters:
+ port: 5004
+
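+As noted above, initialization parameters such as `port` can be overridden per device instance in the laboratory
+definition. A hypothetical override in the lab YAML file:
+
+:bdg-primary:`laboratory.yml`
+
+.. code-block:: yaml
+
+    devices:
+      magnetic_mixer:
+        type: magnetic_mixer
+        computer: eos_computer
+        initialization_parameters:
+          port: 5006
+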
+Python File (device.py)
+~~~~~~~~~~~~~~~~~~~~~~~
+* Implements device functionality
+* All device implementations must inherit from `BaseDevice`
+* The device class name must end with "Device" to be discovered by EOS
+
+Below is an example implementation of a magnetic mixer device:
+
+:bdg-primary:`device.py`
+
+.. code-block:: python
+
+ from typing import Dict, Any
+
+ from eos.containers.entities.container import Container
+ from eos.devices.base_device import BaseDevice
+ from user.color_lab.common.device_client import DeviceClient
+
+ class MagneticMixerDevice(BaseDevice):
+ def _initialize(self, initialization_parameters: Dict[str, Any]) -> None:
+ port = int(initialization_parameters["port"])
+ self.client = DeviceClient(port)
+ self.client.open_connection()
+
+ def _cleanup(self) -> None:
+ self.client.close_connection()
+
+ def _report(self) -> Dict[str, Any]:
+ return {}
+
+ def mix(self, container: Container, mixing_time: int, mixing_speed: int) -> Container:
+ result = self.client.send_command("mix", {"mixing_time": mixing_time, "mixing_speed": mixing_speed})
+ if result:
+ container.metadata["mixing_time"] = mixing_time
+ container.metadata["mixing_speed"] = mixing_speed
+
+ return container
+
+Let's walk through this example code.
+
+The following functions are required in every device implementation:
+
+#. **_initialize**
+
+   * Called when the device process is created
+ * Should set up necessary resources (e.g., serial connections)
+
+#. **_cleanup**
+
+ * Called when the device process is terminated
+ * Should clean up any resources created by the device process (e.g., serial connections)
+
+#. **_report**
+
+ * Should return any data needed to determine the state of the device (e.g., status and feedback)
+
+The magnetic mixer device also has the function `mix` for implementing the mixing operation. This function will be called
+by a task to mix the contents of a container. The `mix` function:
+
+* Sends a command to the lower-level driver with a specified mixing time and speed to operate the magnetic mixer
+* Updates container metadata with mixing details
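+
+The `DeviceClient` imported above is a helper specific to the `color_lab` package, not part of EOS. A minimal sketch
+of what such a client might look like, assuming a simple JSON-over-TCP protocol between the device process and a
+low-level driver:
+
+:bdg-primary:`device_client.py`
+
+.. code-block:: python
+
+    import json
+    import socket
+    from typing import Any, Dict
+
+
+    class DeviceClient:
+        """Hypothetical JSON-over-TCP client for a low-level device driver."""
+
+        def __init__(self, port: int, host: str = "localhost") -> None:
+            self.host = host
+            self.port = port
+            self.sock = None
+
+        def open_connection(self) -> None:
+            self.sock = socket.create_connection((self.host, self.port))
+
+        def close_connection(self) -> None:
+            if self.sock is not None:
+                self.sock.close()
+                self.sock = None
+
+        def send_command(self, command: str, parameters: Dict[str, Any]) -> Any:
+            # Send one JSON-encoded command and block until the driver replies.
+            self.sock.sendall(json.dumps({"command": command, "parameters": parameters}).encode())
+            return json.loads(self.sock.recv(4096).decode())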
diff --git a/docs/user-guide/experiments.rst b/docs/user-guide/experiments.rst
new file mode 100644
index 0000000..2dcfc05
--- /dev/null
+++ b/docs/user-guide/experiments.rst
@@ -0,0 +1,286 @@
+Experiments
+===========
+Experiments are sets of tasks that are executed in a specific order. Experiments are represented as directed
+acyclic graphs (DAGs) whose nodes are tasks and whose edges are dependencies between tasks. Tasks that are part of an
+experiment can pass parameters and containers to each other using EOS' reference system. Task parameters may be fully
+defined, with values provided for all of them, or they may be left undefined by denoting them as dynamic parameters.
+Experiments with dynamic parameters can be used to run campaigns of experiments, where an optimizer generates the
+values for the dynamic parameters across repeated experiments to optimize some objectives.
+
+.. figure:: ../_static/img/experiment-graph.png
+ :alt: Example experiment graph
+ :align: center
+
+Above is an example of a possible experiment that could be implemented with EOS. There is a series of tasks, each
+requiring one or more devices. In addition to the task precedence dependencies shown as edges in the graph, there can
+also be dependencies in the form of parameters and containers passed between tasks. For example, the task "Mix Solutions"
+may take as input parameters the volumes of the solutions to mix, and these values may be output by the "Dispense Solutions"
+task. Tasks can reference input/output parameters and containers from other tasks.
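+
+For example, a task can consume another task's output by referencing it as `TASK_ID.PARAMETER_NAME`, as in the
+`score_color` task shown later in this page:
+
+.. code-block:: yaml
+
+    parameters:
+      red: analyze_color.red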
+
+Experiment Implementation
+-------------------------
+* Experiments are implemented in the `experiments` subdirectory inside an EOS package
+* Each experiment has its own subfolder (e.g., experiments/optimize_yield)
+* There are two key files per experiment: `experiment.yml` and `optimizer.py` (for running campaigns with optimization)
+
+YAML File (experiment.yml)
+~~~~~~~~~~~~~~~~~~~~~~~~~~
+Defines the experiment: the experiment type, required labs, (optional) container initialization, and the tasks.
+
+Below is an example experiment YAML file for an experiment to optimize parameters to synthesize a specific color:
+
+:bdg-primary:`experiment.yml`
+
+.. code-block:: yaml
+
+ type: color_mixing
+ description: Experiment to find optimal parameters to synthesize a desired color
+
+ labs:
+ - color_lab
+
+ tasks:
+ - id: retrieve_container
+ type: Retrieve Container
+ description: Get a random available container from storage and move it to the color dispenser
+ devices:
+ - lab_id: color_lab
+ id: robot_arm
+ - lab_id: color_lab
+ id: container_storage
+ containers:
+ c_a: c_a
+ c_b: c_b
+ c_c: c_c
+ c_d: c_d
+ c_e: c_e
+ parameters:
+ target_location: color_dispenser
+ dependencies: []
+
+ - id: dispense_colors
+ type: Dispense Colors
+ description: Dispense a color from the color dispenser into the container
+ devices:
+ - lab_id: color_lab
+ id: color_dispenser
+ containers:
+ beaker: retrieve_container.beaker
+ parameters:
+ cyan_volume: eos_dynamic
+ magenta_volume: eos_dynamic
+ yellow_volume: eos_dynamic
+ black_volume: eos_dynamic
+ dependencies: [retrieve_container]
+
+ - id: move_container_to_mixer
+ type: Move Container
+ description: Move the container to the magnetic mixer
+ devices:
+ - lab_id: color_lab
+ id: robot_arm
+ - lab_id: color_lab
+ id: magnetic_mixer
+ containers:
+ beaker: dispense_colors.beaker
+ parameters:
+ target_location: magnetic_mixer
+ dependencies: [dispense_colors]
+
+ - id: mix_colors
+ type: Magnetic Mixing
+ description: Mix the colors in the container
+ devices:
+ - lab_id: color_lab
+ id: magnetic_mixer
+ containers:
+ beaker: move_container_to_mixer.beaker
+ parameters:
+ mixing_time: eos_dynamic
+ mixing_speed: eos_dynamic
+ dependencies: [move_container_to_mixer]
+
+ - id: move_container_to_analyzer
+ type: Move Container
+ description: Move the container to the color analyzer
+ devices:
+ - lab_id: color_lab
+ id: robot_arm
+ - lab_id: color_lab
+ id: color_analyzer
+ containers:
+ beaker: mix_colors.beaker
+ parameters:
+ target_location: color_analyzer
+ dependencies: [mix_colors]
+
+ - id: analyze_color
+ type: Analyze Color
+ description: Analyze the color of the solution in the container and output the RGB values
+ devices:
+ - lab_id: color_lab
+ id: color_analyzer
+ containers:
+ beaker: move_container_to_analyzer.beaker
+ dependencies: [move_container_to_analyzer]
+
+ - id: score_color
+ type: Score Color
+ description: Score the color based on the RGB values
+ parameters:
+ red: analyze_color.red
+ green: analyze_color.green
+ blue: analyze_color.blue
+ dependencies: [analyze_color]
+
+ - id: empty_container
+ type: Empty Container
+ description: Empty the container and move it to the cleaning station
+ devices:
+ - lab_id: color_lab
+ id: robot_arm
+ - lab_id: color_lab
+ id: cleaning_station
+ containers:
+ beaker: analyze_color.beaker
+ parameters:
+ emptying_location: emptying_location
+ target_location: cleaning_station
+ dependencies: [analyze_color]
+
+ - id: clean_container
+ type: Clean Container
+ description: Clean the container by rinsing it with distilled water
+ devices:
+ - lab_id: color_lab
+ id: cleaning_station
+ containers:
+ beaker: empty_container.beaker
+ dependencies: [empty_container]
+
+ - id: store_container
+ type: Store Container
+ description: Store the container back in the container storage
+ devices:
+ - lab_id: color_lab
+ id: robot_arm
+ - lab_id: color_lab
+ id: container_storage
+ containers:
+ beaker: clean_container.beaker
+ parameters:
+ storage_location: container_storage
+ dependencies: [clean_container]
+
+Let's dissect this file:
+
+.. code-block:: yaml
+
+ type: color_mixing
+ description: Experiment to find optimal parameters to synthesize a desired color
+
+ labs:
+ - color_lab
+
+Every experiment has a type, which essentially identifies the class of experiment. When an experiment is running,
+there are instances of that type with different IDs. Each experiment also requires one or more labs.
+
+Now let's look at the first task in the experiment:
+
+.. code-block:: yaml
+
+ - id: retrieve_container
+ type: Retrieve Container
+ description: Get a random available container from storage and move it to the color dispenser
+ devices:
+ - lab_id: color_lab
+ id: robot_arm
+ - lab_id: color_lab
+ id: container_storage
+ containers:
+ c_a: c_a
+ c_b: c_b
+ c_c: c_c
+ c_d: c_d
+ c_e: c_e
+ parameters:
+ target_location: color_dispenser
+ dependencies: []
+
+The first task is named `retrieve_container` and is of type `Retrieve Container`. This task uses the robot arm to get
+a random container from storage. The task requires two devices, the robot arm and the container storage. There are five
+containers passed to it, "c_a" through "c_e". There is also a parameter `target_location` that is set to `color_dispenser`.
+This task has no dependencies as it is the first task in the experiment and is essentially a container feeder.
+There are five containers in storage, and one of them is chosen at random for the experiment. All five containers in our
+"color lab" are passed to this task, as any one of them could be chosen.
+
+Let's look at the next task:
+
+.. code-block:: yaml
+
+ - id: dispense_colors
+ type: Dispense Colors
+ description: Dispense a color from the color dispenser into the container
+ devices:
+ - lab_id: color_lab
+ id: color_dispenser
+ containers:
+ beaker: retrieve_container.beaker
+ parameters:
+ cyan_volume: eos_dynamic
+ magenta_volume: eos_dynamic
+ yellow_volume: eos_dynamic
+ black_volume: eos_dynamic
+ dependencies: [retrieve_container]
+
+This task takes the container from the `retrieve_container` task and dispenses colors into it. The task has an
+input container called "beaker" which references the output container named "beaker" from the `retrieve_container` task.
+If we look at the `task.yml` file of the task `Retrieve Container` we would see that a container named "beaker" is
+defined in `output_containers`. There are also four parameters, the CMYK volumes to dispense. All these parameters are
+set to `eos_dynamic`, which is a special keyword in EOS for defining dynamic parameters, instructing the system that
+these parameters must be specified either by the user or an optimizer before an experiment is run.
+
+Optimizer File (optimizer.py)
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Contains a function that returns the constructor arguments and the class type for an optimizer.
+
+As an example, below is the optimizer file for the color mixing experiment:
+
+:bdg-primary:`optimizer.py`
+
+.. code-block:: python
+
+ from typing import Type, Tuple, Dict
+
+ from bofire.data_models.acquisition_functions.acquisition_function import qNEI
+ from bofire.data_models.enum import SamplingMethodEnum
+ from bofire.data_models.features.continuous import ContinuousOutput, ContinuousInput
+ from bofire.data_models.objectives.identity import MinimizeObjective
+
+ from eos.optimization.sequential_bayesian_optimizer import BayesianSequentialOptimizer
+ from eos.optimization.abstract_sequential_optimizer import AbstractSequentialOptimizer
+
+
+ def eos_create_campaign_optimizer() -> Tuple[Dict, Type[AbstractSequentialOptimizer]]:
+ constructor_args = {
+ "inputs": [
+ ContinuousInput(key="dispense_colors.cyan_volume", bounds=(0, 5)),
+ ContinuousInput(key="dispense_colors.magenta_volume", bounds=(0, 5)),
+ ContinuousInput(key="dispense_colors.yellow_volume", bounds=(0, 5)),
+ ContinuousInput(key="dispense_colors.black_volume", bounds=(0, 5)),
+ ContinuousInput(key="mix_colors.mixing_time", bounds=(1, 15)),
+ ContinuousInput(key="mix_colors.mixing_speed", bounds=(10, 500)),
+ ],
+ "outputs": [
+ ContinuousOutput(key="score_color.loss", objective=MinimizeObjective(w=1.0)),
+ ],
+ "constraints": [],
+ "acquisition_function": qNEI(),
+ "num_initial_samples": 50,
+ "initial_sampling_method": SamplingMethodEnum.SOBOL,
+ }
+
+ return constructor_args, BayesianSequentialOptimizer
+
+The `optimizer.py` file is optional and only required for running experiment campaigns with optimization managed by EOS.
+More on optimizers can be found in the Optimizers section of the User Guide.
diff --git a/docs/user-guide/index.rst b/docs/user-guide/index.rst
new file mode 100644
index 0000000..4260be1
--- /dev/null
+++ b/docs/user-guide/index.rst
@@ -0,0 +1,25 @@
+User Guide
+==========
+
+.. toctree::
+ :caption: Getting Started
+
+ installation
+ configuration
+ running
+
+.. toctree::
+ :caption: Concepts
+
+ packages
+ devices
+ laboratories
+ tasks
+ experiments
+ campaigns
+ optimizers
+
+.. toctree::
+ :caption: Advanced
+
+ jinja2_templating
\ No newline at end of file
diff --git a/docs/user-guide/installation.rst b/docs/user-guide/installation.rst
new file mode 100644
index 0000000..736ce25
--- /dev/null
+++ b/docs/user-guide/installation.rst
@@ -0,0 +1,59 @@
+Installation
+============
+
+EOS should be installed on a capable computer in the laboratory. We recommend a central
+computer that is easily accessible.
+
+.. note::
+ If EOS will be connecting to other computers to run automation, then you must ensure that the computer where EOS
+ is installed has bi-directional network access to the other computers.
+
+ We strongly recommend that the laboratory has its own isolated network for security and performance reasons.
+ See our infrastructure setup guide for more information.
+
+EOS also requires a MongoDB database, a MinIO object storage server, and (for now) Budibase for the web UI.
+We provide a Docker Compose file that can set up all of these services for you.
+
+1. Install PDM
+^^^^^^^^^^^^^^
+EOS uses PDM as its project manager, making it easier to install dependencies and build the project.
+
+.. tab-set::
+
+ .. tab-item:: Linux/Mac
+
+ .. code-block:: shell
+
+ curl -sSL https://pdm-project.org/install-pdm.py | python3 -
+
+ .. tab-item:: Windows
+
+ .. code-block:: shell
+
+ (Invoke-WebRequest -Uri https://pdm-project.org/install-pdm.py -UseBasicParsing).Content | py -
+
+2. Clone the EOS Repository
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+.. code-block:: shell
+
+ git clone https://github.com/aangelos28/eos
+
+3. Install Dependencies
+^^^^^^^^^^^^^^^^^^^^^^^
+Navigate to the cloned repository and run:
+
+.. code-block:: shell
+
+ pdm install
+
+(Optional) If you wish to contribute to EOS development:
+
+.. code-block:: shell
+
+ pdm install -G dev
+
+(Optional) If you also wish to contribute to the EOS documentation:
+
+.. code-block:: shell
+
+ pdm install -G docs
diff --git a/docs/user-guide/jinja2_templating.rst b/docs/user-guide/jinja2_templating.rst
new file mode 100644
index 0000000..ae9e428
--- /dev/null
+++ b/docs/user-guide/jinja2_templating.rst
@@ -0,0 +1,24 @@
+Jinja2 Templating
+=================
+The EOS YAML files used to define labs, devices, experiments, and tasks support Jinja2 templating. This allows easier
+authoring of complex YAML files by enabling the use of variables, loops, and conditionals. Jinja2 templates are
+evaluated in Python, so many of their expressions follow Python syntax.
+
+.. note::
+    Jinja2 templates are evaluated when the YAML file is loaded, not at runtime.
+
+Below is the "containers" portion of a lab YAML file that uses Jinja2 templating:
+
+:bdg-primary:`lab.yml`
+
+.. code-block:: yaml+jinja
+
+ containers:
+ - type: beaker
+ location: container_storage
+ metadata:
+ capacity: 300
+ ids:
+ {% for letter in ['a', 'b', 'c', 'd', 'e'] %}
+ - c_{{ letter }}
+ {% endfor %}
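+
+When the lab definition is loaded, the template above expands to plain YAML:
+
+.. code-block:: yaml
+
+    containers:
+      - type: beaker
+        location: container_storage
+        metadata:
+          capacity: 300
+        ids:
+          - c_a
+          - c_b
+          - c_c
+          - c_d
+          - c_e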
diff --git a/docs/user-guide/laboratories.rst b/docs/user-guide/laboratories.rst
new file mode 100644
index 0000000..8900428
--- /dev/null
+++ b/docs/user-guide/laboratories.rst
@@ -0,0 +1,267 @@
+Laboratories
+============
+Laboratories are the space in which devices and containers exist and where tasks, experiments, and campaigns
+of experiments take place.
+
+A laboratory in EOS is a collection of:
+
+* Locations (e.g., physical stations around the lab)
+* Computers (e.g., devices capable of controlling equipment)
+* Devices (e.g., equipment/apparatuses in the laboratory)
+* Containers (e.g., vessels for holding samples)
+
+.. figure:: ../_static/img/laboratory.png
+ :alt: Contents of a laboratory
+ :align: center
+
+Laboratory Implementation
+-------------------------
+* Laboratories are implemented in the `laboratories` subdirectory inside an EOS package
+* Each laboratory has its own subfolder (e.g., laboratories/color_lab)
+* The laboratory is defined in a YAML file named `laboratory.yml`
+
+Below is an example laboratory YAML file for a solar cell fabrication lab:
+
+:bdg-primary:`lab.yml`
+
+.. code-block:: yaml
+
+ type: solar_cell_fabrication_lab
+ description: A laboratory for fabricating and characterizing perovskite solar cells
+
+ locations:
+ glovebox:
+ description: Nitrogen-filled glovebox
+ metadata:
+ map_coordinates:
+ x: 10
+ y: 20
+ theta: 0
+ fume_hood:
+ description: Fume hood for solution preparation and coating
+ annealing_station:
+ description: Hotplate for thermal annealing
+ evaporation_chamber:
+ description: Thermal evaporation chamber for electrode deposition
+ characterization_room:
+ description: Room for solar cell performance testing
+
+ computers:
+ xrd_computer:
+ description: XRD system control and data analysis
+ ip: 192.168.1.101
+ solar_sim_computer:
+ description: Solar simulator control and J-V measurements
+ ip: 192.168.1.102
+ robot_computer:
+ description: Mobile manipulation robot control
+ ip: 192.168.1.103
+
+ devices:
+ spin_coater:
+ description: Spin coater for depositing perovskite and transport layers
+ type: spin_coater
+ location: glovebox
+ computer: eos_computer
+
+ uv_ozone_cleaner:
+ description: UV-Ozone cleaner for substrate treatment
+ type: uv_ozone_cleaner
+ location: fume_hood
+ computer: eos_computer
+
+ thermal_evaporator:
+ description: Thermal evaporator for metal electrode deposition
+ type: thermal_evaporator
+ location: evaporation_chamber
+ computer: eos_computer
+ initialization_parameters:
+ max_temperature: 1000C
+ materials: [Au, Ag, Al]
+
+ solar_simulator:
+ description: Solar simulator for J-V curve measurements
+ type: solar_simulator
+ location: characterization_room
+ computer: solar_sim_computer
+ initialization_parameters:
+ spectrum: AM1.5G
+ intensity: 100mW/cm2
+
+ xrd_system:
+ description: X-ray diffractometer for crystal structure analysis
+ type: xrd
+ location: characterization_room
+ computer: xrd_computer
+
+ mobile_robot:
+ description: Mobile manipulation robot for automated sample transfer
+ type: mobile_robot
+ location: characterization_room
+ computer: robot_computer
+ initialization_parameters:
+ locations:
+ - glovebox
+ - fume_hood
+ - annealing_station
+ - evaporation_chamber
+ - characterization_room
+
+ containers:
+ - type: vial
+ location: glovebox
+ metadata:
+        capacity: 20 #ml
+ ids:
+ - precursor_vial_1
+ - precursor_vial_2
+ - precursor_vial_3
+
+ - type: petri_dish
+ location: glovebox
+ metadata:
+ capacity: 100 #ml
+ ids:
+ - substrate_dish_1
+ - substrate_dish_2
+
+ - type: crucible
+ location: evaporation_chamber
+ metadata:
+ capacity: 5 #ml
+ ids:
+ - au_crucible
+ - ag_crucible
+
+Locations (Optional)
+""""""""""""""""""""
+Locations are physical stations around the lab where devices and containers are placed. They are defined in the
+`locations` section of the laboratory YAML file. You can define metadata for each location, such as map coordinates
+for a mobile robot. Defining locations is optional.
+
+.. code-block:: yaml
+
+ locations:
+ glovebox:
+ description: Nitrogen-filled glovebox
+ metadata:
+ map_coordinates:
+ x: 10
+ y: 20
+ theta: 0
+ fume_hood:
+ description: Fume hood for solution preparation and coating
+ annealing_station:
+ description: Hotplate for thermal annealing
+ evaporation_chamber:
+ description: Thermal evaporation chamber for electrode deposition
+ characterization_room:
+ description: Room for solar cell performance testing
+
+Computers (Optional)
+""""""""""""""""""""
+Computers control devices and host EOS device implementations. Each computer that is required to interface with one or
+more devices must be defined in this section, along with its IP address.
+
+There is always a computer in each lab called **eos_computer** that has the IP "127.0.0.1". This is the computer
+that runs the EOS orchestrator, and can be thought of as the "central" computer. No other computer may be named
+"eos_computer", and no other computer may have the IP "127.0.0.1". The "computers" section need not be defined unless
+additional computers are required (e.g., if not all devices are connected to eos_computer).
+
+.. figure:: ../_static/img/eos-computers.png
+ :alt: EOS computers
+ :align: center
+
+.. code-block:: yaml
+
+ computers:
+ xrd_computer:
+ description: XRD system control and data analysis
+ ip: 192.168.1.101
+ solar_sim_computer:
+ description: Solar simulator control and J-V measurements
+ ip: 192.168.1.102
+ robot_computer:
+ description: Mobile manipulation robot control
+ ip: 192.168.1.103
+
+Devices (Required)
+""""""""""""""""""
+Devices are equipment or apparatuses in the laboratory that are required to perform tasks. Each device must have a unique
+name inside the lab and must be defined in the `devices` section of the laboratory YAML file.
+
+.. code-block:: yaml
+
+ devices:
+ spin_coater:
+ description: Spin coater for depositing perovskite and transport layers
+ type: spin_coater
+ location: glovebox
+ computer: eos_computer
+
+ uv_ozone_cleaner:
+ description: UV-Ozone cleaner for substrate treatment
+ type: uv_ozone_cleaner
+ location: fume_hood
+ computer: eos_computer
+
+ thermal_evaporator:
+ description: Thermal evaporator for metal electrode deposition
+ type: thermal_evaporator
+ location: evaporation_chamber
+ computer: eos_computer
+ initialization_parameters:
+ max_temperature: 1000C
+ materials: [Au, Ag, Al]
+
+**type**: Every device must have a type, which matches a device specification (e.g., defined in the `devices` subdirectory
+of an EOS package). There can be multiple devices with different names of the same type.
+
+**location** (optional): The location where the device is.
+
+**computer**: The computer that controls the device. If not "eos_computer", the computer must be defined in the
+"computers" section.
+
+**initialization_parameters** (optional): Parameters required to initialize the device. These parameters are defined
+in the device specification and can be overridden here.
+
+Containers (Optional)
+"""""""""""""""""""""
+Containers are vessels for holding samples and are how samples move around the lab (e.g., for batch processing). They are
+defined in the `containers` section of the laboratory YAML file.
+
+.. code-block:: yaml
+
+ containers:
+ - type: vial
+ location: glovebox
+ metadata:
+ capacity: 20 #ml
+ ids:
+ - precursor_vial_1
+ - precursor_vial_2
+ - precursor_vial_3
+
+ - type: petri_dish
+ location: glovebox
+ metadata:
+ capacity: 100 #ml
+ ids:
+ - substrate_dish_1
+ - substrate_dish_2
+
+ - type: crucible
+ location: evaporation_chamber
+ metadata:
+ capacity: 5 #ml
+ ids:
+ - au_crucible
+ - ag_crucible
+
+**type**: Every container must have a type, which can be used to group together containers of the same type.
+
+**location** (optional): The location where the container starts out.
+
+**metadata** (optional): Any additional information about the container, such as its capacity or contained sample.
+
+**ids**: A list of unique identifiers for each container. These are used to identify and refer to specific containers.
diff --git a/docs/user-guide/optimizers.rst b/docs/user-guide/optimizers.rst
new file mode 100644
index 0000000..1f7e8c1
--- /dev/null
+++ b/docs/user-guide/optimizers.rst
@@ -0,0 +1,171 @@
+Optimizers
+==========
+Optimizers are key to building an autonomous laboratory. In EOS, optimizers give intelligence to experiment campaigns
+by optimizing task parameters to achieve objectives over time. Optimizers in EOS are *sequential*, meaning they iteratively
+optimize parameters by drawing insights from previous experiments. One of the most common sequential optimization
+methods is **Bayesian optimization**, which is especially useful for optimizing expensive-to-evaluate black-box functions.
+
+.. figure:: ../_static/img/optimize-experiment-loop.png
+ :alt: Optimization and experiment loop
+ :align: center
+
+EOS has a built-in Bayesian optimizer powered by `BoFire <https://github.com/experimental-design/bofire>`_
+(based on `BoTorch <https://botorch.org>`_). This optimizer supports both constrained single-objective and multi-objective
+Bayesian optimization. It offers several different surrogate models, including Gaussian Processes (GPs) and
+Multi-Layer Perceptrons (MLPs), along with various acquisition functions.
+
+Distributed Execution
+---------------------
+EOS optimizers are created in a dedicated Ray actor process. This actor process can be created on any computer with an
+active Ray worker, which enables running the optimizer on a more capable computer than the one running
+the EOS orchestrator.
+
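+For illustration only (this is not EOS' actual internal code), hosting an optimizer in a Ray actor looks roughly like
+the sketch below; Ray can schedule the actor on any node in the cluster:
+
+.. code-block:: python
+
+    import ray
+
+    ray.init()  # connect to the local or remote Ray cluster
+
+    @ray.remote
+    class OptimizerActor:
+        """Hypothetical wrapper that owns an optimizer instance in its own process."""
+
+        def __init__(self, constructor_args: dict, optimizer_type: type):
+            self.optimizer = optimizer_type(**constructor_args)
+
+        def sample(self, num_experiments: int = 1):
+            return self.optimizer.sample(num_experiments)
+
+    # The actor runs in a separate process, possibly on another machine:
+    # actor = OptimizerActor.remote(constructor_args, BayesianSequentialOptimizer)
+    # samples = ray.get(actor.sample.remote(1))
+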
+Optimizer Implementation
+------------------------
+EOS optimizers are defined in the `optimizer.py` file adjacent to `experiment.yml` in an EOS package. Below is an example:
+
+:bdg-primary:`optimizer.py`
+
+.. code-block:: python
+
+ from bofire.data_models.acquisition_functions.acquisition_function import qNEI
+ from bofire.data_models.enum import SamplingMethodEnum
+ from bofire.data_models.features.continuous import ContinuousOutput, ContinuousInput
+ from bofire.data_models.objectives.identity import MinimizeObjective
+
+ from eos.optimization.sequential_bayesian_optimizer import BayesianSequentialOptimizer
+ from eos.optimization.abstract_sequential_optimizer import AbstractSequentialOptimizer
+
+
+ def eos_create_campaign_optimizer() -> tuple[dict, type[AbstractSequentialOptimizer]]:
+ constructor_args = {
+ "inputs": [
+ ContinuousInput(key="dispense_colors.cyan_volume", bounds=(0, 5)),
+ ContinuousInput(key="dispense_colors.magenta_volume", bounds=(0, 5)),
+ ContinuousInput(key="dispense_colors.yellow_volume", bounds=(0, 5)),
+ ContinuousInput(key="dispense_colors.black_volume", bounds=(0, 5)),
+ ContinuousInput(key="mix_colors.mixing_time", bounds=(1, 15)),
+ ContinuousInput(key="mix_colors.mixing_speed", bounds=(10, 500)),
+ ],
+ "outputs": [
+ ContinuousOutput(key="score_color.loss", objective=MinimizeObjective(w=1.0)),
+ ],
+ "constraints": [],
+ "acquisition_function": qNEI(),
+ "num_initial_samples": 50,
+ "initial_sampling_method": SamplingMethodEnum.SOBOL,
+ }
+
+ return constructor_args, BayesianSequentialOptimizer
+
+Each `optimizer.py` file must contain the function `eos_create_campaign_optimizer`. This function must return:
+
+#. The constructor arguments to make an optimizer class instance
+#. The class type of the optimizer
+
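+Conceptually, EOS consumes this function's return value along these lines (a sketch, not EOS' actual code):
+
+.. code-block:: python
+
+    constructor_args, optimizer_type = eos_create_campaign_optimizer()
+    optimizer = optimizer_type(**constructor_args)
+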
+In this example, we use EOS' built-in Bayesian optimizer. However, it is also possible to define custom optimizers in this
+file, and simply return the constructor arguments and the class type from `eos_create_campaign_optimizer`.
+
+.. note::
+ All optimizers must inherit from the class `AbstractSequentialOptimizer` under the `eos.optimization` module.
+
+Input and Output Parameter Naming
+"""""""""""""""""""""""""""""""""
+The names of input and output parameters must reference task parameters using the EOS reference format:
+
+**TASK.PARAMETER_NAME**
+
+For example, `mix_colors.mixing_time` refers to the `mixing_time` parameter of the task with ID `mix_colors`. This is
+necessary for EOS to associate the optimizer with the experiment tasks and to forward parameter values where needed.
+
+Example Custom Optimizer
+------------------------
+Below is an example of a custom optimizer implementation that randomly samples parameters for the same color mixing problem:
+
+:bdg-primary:`optimizer.py`
+
+.. code-block:: python
+
+ import random
+ from dataclasses import dataclass
+ from enum import Enum
+ import pandas as pd
+
+ from eos.optimization.abstract_sequential_optimizer import AbstractSequentialOptimizer
+
+
+ class ObjectiveType(Enum):
+ MINIMIZE = 1
+ MAXIMIZE = 2
+
+
+ @dataclass
+ class Parameter:
+ name: str
+ lower_bound: float
+ upper_bound: float
+
+
+ @dataclass
+ class Metric:
+ name: str
+ objective: ObjectiveType
+
+
+ class RandomSamplingOptimizer(AbstractSequentialOptimizer):
+ def __init__(self, parameters: list[Parameter], metrics: list[Metric]):
+ self.parameters = parameters
+ self.metrics = metrics
+ self.results = []
+
+ def sample(self, num_experiments: int = 1) -> pd.DataFrame:
+ samples = []
+ for _ in range(num_experiments):
+ sample = {param.name: random.uniform(param.lower_bound, param.upper_bound) for param in self.parameters}
+ samples.append(sample)
+ return pd.DataFrame(samples)
+
+ def report(self, inputs_df: pd.DataFrame, outputs_df: pd.DataFrame) -> None:
+ for _, row in pd.concat([inputs_df, outputs_df], axis=1).iterrows():
+ self.results.append(row.to_dict())
+
+ def get_optimal_solutions(self) -> pd.DataFrame:
+ if not self.results:
+ return pd.DataFrame(
+ columns=[param.name for param in self.parameters] + [metric.name for metric in self.metrics]
+ )
+
+ df = pd.DataFrame(self.results)
+ optimal_solutions = []
+
+ for metric in self.metrics:
+ if metric.objective == ObjectiveType.MINIMIZE:
+ optimal = df.loc[df[metric.name].idxmin()]
+ else:
+ optimal = df.loc[df[metric.name].idxmax()]
+ optimal_solutions.append(optimal)
+
+ return pd.DataFrame(optimal_solutions)
+
+ def get_input_names(self) -> list[str]:
+ return [param.name for param in self.parameters]
+
+ def get_output_names(self) -> list[str]:
+ return [metric.name for metric in self.metrics]
+
+ def eos_create_campaign_optimizer() -> tuple[dict, type[AbstractSequentialOptimizer]]:
+ constructor_args = {
+ "parameters": [
+ Parameter(name="dispense_colors.cyan_volume", lower_bound=0, upper_bound=5),
+ Parameter(name="dispense_colors.magenta_volume", lower_bound=0, upper_bound=5),
+ Parameter(name="dispense_colors.yellow_volume", lower_bound=0, upper_bound=5),
+ Parameter(name="dispense_colors.black_volume", lower_bound=0, upper_bound=5),
+ Parameter(name="mix_colors.mixing_time", lower_bound=1, upper_bound=15),
+ Parameter(name="mix_colors.mixing_speed", lower_bound=10, upper_bound=500),
+ ],
+ "metrics": [
+ Metric(name="score_color.loss", objective=ObjectiveType.MINIMIZE),
+ ],
+ }
+
+ return constructor_args, RandomSamplingOptimizer
\ No newline at end of file
diff --git a/docs/user-guide/packages.rst b/docs/user-guide/packages.rst
new file mode 100644
index 0000000..02bdd6e
--- /dev/null
+++ b/docs/user-guide/packages.rst
@@ -0,0 +1,31 @@
+Packages
+========
+Code and resources in EOS are organized into packages, which are discovered and loaded at runtime.
+Each package is essentially a folder. These packages can contain laboratory, device, task, and experiment definitions,
+code, and data, allowing reuse and sharing across the community. For example, a package can contain task and device
+implementations for equipment from a specific manufacturer, while another package may only contain experiments that
+run in a specific lab.
+
+.. figure:: ../_static/img/package.png
+ :alt: EOS package
+ :align: center
+
+Using a package is as simple as placing it in a directory that EOS loads packages from. By default, this directory
+is called `user` and is located in the root of the EOS repository.
+
+Below is the directory tree of an example EOS package called "color_lab". It contains a laboratory called "color_lab",
+an experiment called "color_mixing", and various devices and tasks. It also contains additional files
+such as a Python script for launching low-level device drivers, a device client under `common`, and a README file.
+
+.. figure:: ../_static/img/example-package-tree.png
+ :alt: Example package directory tree
+ :align: center
+
+Create a Package
+----------------
+.. code-block:: shell
+
+ eos pkg create my_package
+
+This command is a shortcut to create a new package with all standard subdirectories, as shown below. Feel free to
+delete subdirectories you don't expect to use.
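+
+For a package created with the command above, the resulting directory tree should look as follows:
+
+.. code-block:: text
+
+ user/
+ └── my_package/
+ ├── common/
+ ├── devices/
+ ├── experiments/
+ ├── labs/
+ └── tasks/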
diff --git a/docs/user-guide/running.rst b/docs/user-guide/running.rst
new file mode 100644
index 0000000..cda8308
--- /dev/null
+++ b/docs/user-guide/running.rst
@@ -0,0 +1,26 @@
+Running
+=======
+1. Start External Services
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+.. code-block:: shell
+
+ cd docker
+ docker compose up -d
+
+2. Source the Virtual Environment
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+.. code-block:: shell
+
+ source env/bin/activate
+
+3. Start the EOS Orchestrator
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+.. code-block:: shell
+
+ eos orchestrator
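+
+The orchestrator accepts optional flags (see `eos orchestrator --help`), for example to choose the user directory and
+which labs and experiments to load:
+
+.. code-block:: shell
+
+ eos orchestrator --user-dir ./user --labs color_lab --experiments color_mixing --log-level INFO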
+
+4. Start the EOS REST API
+^^^^^^^^^^^^^^^^^^^^^^^^^
+.. code-block:: shell
+
+ eos api
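+
+By default, the REST API listens on port 8000 and connects to the orchestrator at localhost:8070. Both can be
+overridden:
+
+.. code-block:: shell
+
+ eos api --host 0.0.0.0 --port 8000 --orchestrator-host localhost --orchestrator-port 8070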
diff --git a/docs/user-guide/tasks.rst b/docs/user-guide/tasks.rst
new file mode 100644
index 0000000..5274741
--- /dev/null
+++ b/docs/user-guide/tasks.rst
@@ -0,0 +1,254 @@
+Tasks
+=====
+A task in EOS encapsulates an operation and can be thought of as a function. Tasks are the elementary building blocks
+in EOS. A task is ephemeral, meaning it is created, executed, and terminated. A task takes some inputs, returns some
+outputs, and may use one or more devices.
+
+There are two kinds of inputs: **parameters** and **containers**.
+
+#. **Parameters**: Data such as integers, decimals, strings, booleans, etc., that are passed to the task.
+#. **Containers**: Vessels that may contain one or more samples.
+
+There are three kinds of outputs: **parameters**, **containers**, and **files**.
+
+#. **Parameters**: Data such as integers, decimals, strings, booleans, etc., that are returned by the task.
+#. **Containers**: Vessels that may contain one or more samples.
+#. **Files**: Raw data or reports generated by the task, such as output files from analysis.
+
+.. figure:: ../_static/img/task-inputs-outputs.png
+ :alt: EOS Task Inputs and Outputs
+ :align: center
+
+Parameters
+----------
+Parameters are values that are input to a task or output from a task. Every parameter has a specific data type.
+EOS supports the following parameter types:
+
+* **integer**: An integer number; equivalent to Python's `int`
+* **decimal**: A decimal number; equivalent to Python's `float`
+* **string**: A string (series of text characters); equivalent to Python's `str`
+* **boolean**: A true/false value; equivalent to Python's `bool`
+* **choice**: A value that must be one of a set of predefined choices. The choices can be any type.
+* **list**: A list of values of a specific type. Equivalent to Python's `list`.
+* **dictionary**: A dictionary of key-value pairs. Equivalent to Python's `dict`.
+
+Tasks can have multiple parameters of different types. EOS will ensure that the parameters passed to a task are of the
+correct type and have values that meet their constraints.
+
+Containers
+----------
+Containers are referenced by a unique string identifier called a **container ID**. Every container in EOS must have
+an ID, and these IDs can be specified in the laboratory definition. Containers are treated as `global` objects and
+can move across labs. However, every container must have a "home" lab from which it originates.
+
+A container is passed to a task or returned from a task by its container ID. Every task may accept specific types
+of containers, such as beakers or vials, and multiple containers can be passed. Users can define their own container
+types, such as `beaker_500ml` or `vial_2ml`. EOS will ensure that only container types that are compatible with the
+task are passed to it.
+
+Files
+-----
+Files may be generated by a task, and EOS will store them in object storage (MinIO). Output files can be used to
+record raw data for future reference and can be downloaded by the user.
+
+.. note::
+ Files cannot currently be passed as inputs to tasks via the EOS runtime and its object storage.
+ This is planned to be supported in the future. It is still possible to pass them using external object
+ storage (e.g., MinIO), but this has to be implemented and managed manually.
+
+Task Implementation
+-------------------
+* Tasks are implemented in the `tasks` subdirectory inside an EOS package
+* Each task has its own subfolder (e.g., tasks/magnetic_mixing)
+* There are two key files per task: `task.yml` and `task.py`
+
+YAML File (task.yml)
+~~~~~~~~~~~~~~~~~~~~
+* Specifies the task type, description, and input/output parameters and containers
+* Acts as the interface contract (spec) for the task
+* This contract is used to validate tasks, and EOS enforces the contract statically and dynamically during execution
+* Useful as documentation for the task
+
+Below is an example task YAML file for a GC analysis task for gas chromatographs made by SRI Instruments:
+
+:bdg-primary:`task.yml`
+
+.. code-block:: yaml
+
+ type: SRI GC Analysis
+ description: Perform gas chromatography (GC) analysis on a sample.
+
+ device_types:
+ - sri_gas_chromatograph
+
+ input_parameters:
+ analysis_time:
+ type: integer
+ unit: seconds
+ value: 480
+ description: How long to run the GC analysis
+
+ output_parameters:
+ known_substances:
+ type: dictionary
+ description: Peaks and peak areas of identified substances
+ unknown_substances:
+ type: dictionary
+ description: Peaks and peak areas of substances that could not be identified
+
+The task specification makes clear that:
+
+* The task is of type "SRI GC Analysis"
+* The task requires a device of type "sri_gas_chromatograph". EOS will enforce this requirement.
+* The task takes an input integer parameter `analysis_time` in seconds. It has a default value of 480, making this an
+ optional parameter.
+* The task outputs two dictionaries: `known_substances` and `unknown_substances`.
+
+Parameter Specification
+"""""""""""""""""""""""
+Parameters are defined in the `input_parameters` and `output_parameters` sections of the task YAML file.
+
+Below are examples and descriptions for each parameter type:
+
+Integer
+"""""""
+.. code-block:: yaml
+
+ sample_rate:
+ type: integer
+ description: The number of samples per second
+ value: 44100
+ unit: Hz
+ min: 8000
+ max: 192000
+
+Integers must have a unit (can be n/a) and can also have a minimum and maximum value.
+
+Decimal
+"""""""
+.. code-block:: yaml
+
+ threshold_voltage:
+ type: decimal
+ description: The voltage threshold for signal detection
+ value: 2.5
+ unit: volts
+ min: 0.0
+ max: 5.0
+
+Decimals must have a unit (can be n/a) and can also have a minimum and maximum value.
+
+String
+""""""
+.. code-block:: yaml
+
+ file_prefix:
+ type: string
+ description: Prefix for output file names
+ value: "experiment_"
+
+Boolean
+"""""""
+.. code-block:: yaml
+
+ auto_calibrate:
+ type: boolean
+ description: Whether to perform auto-calibration before analysis
+ value: true
+
+Booleans are true/false values.
+
+Choice
+""""""
+.. code-block:: yaml
+
+ column_type:
+ type: choice
+ description: HPLC column type
+ value: "C18"
+ choices:
+ - "C18"
+ - "C8"
+ - "HILIC"
+ - "Phenyl-Hexyl"
+ - "Amino"
+
+Choice parameters must take one of the specified choices.
+
+List
+""""
+.. code-block:: yaml
+
+ channel_gains:
+ type: list
+ description: Gain values for each input channel
+ value: [1.0, 1.2, 0.8, 1.1]
+ element_type: decimal
+ length: 4
+ min: [0.5, 0.5, 0.5, 0.5]
+ max: [2.0, 2.0, 2.0, 2.0]
+
+List parameters are sequences of values of a specific type. They can have a fixed length and per-element minimum and
+maximum values.
+
+Dictionary
+""""""""""
+.. code-block:: yaml
+
+ buffer_composition:
+ type: dictionary
+ description: Composition of a buffer solution
+ value:
+ pH: 7.4
+ base: "Tris"
+ concentration: 50
+ unit: "mM"
+ additives:
+ NaCl: 150
+ KCl: 2.7
+ CaCl2: 1.0
+ temperature: 25
+
+Dictionaries are key-value pairs. The values can be any type.
+
+Python File (task.py)
+~~~~~~~~~~~~~~~~~~~~~~
+* Implements the task
+* All task implementations must inherit from `BaseTask`
+* The task class name must end with "Task" to be discovered by EOS
+
+:bdg-primary:`task.py`
+
+.. code-block:: python
+
+ from eos.tasks.base_task import BaseTask
+
+
+ class MagneticMixingTask(BaseTask):
+ def _execute(
+ self,
+ devices: BaseTask.DevicesType,
+ parameters: BaseTask.ParametersType,
+ containers: BaseTask.ContainersType,
+ ) -> BaseTask.OutputType:
+ magnetic_mixer = devices.get_all_by_type("magnetic_mixer")[0]
+ mixing_time = parameters["mixing_time"]
+ mixing_speed = parameters["mixing_speed"]
+
+ containers["beaker"] = magnetic_mixer.mix(containers["beaker"], mixing_time, mixing_speed)
+
+ return None, containers, None
+
+Let's walk through this example code:
+
+`_execute` is the only required function in a task implementation. It is called by EOS to execute a task. The function
+takes three arguments:
+
+#. `devices`: A data structure supporting lookup of the lab devices assigned to the task. In this case, only one
+device is given: a magnetic mixer. Devices are represented as wrappers around Ray actor references, and the task
+implementation can call functions from the device implementation.
+
+#. `parameters`: A dictionary of the input parameters. Keys are the parameter names and values are the parameter values.
+
+#. `containers`: A dictionary of the input containers. Keys are the container IDs and values are the `Container` objects.
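+
+The function returns a tuple of output parameters, output containers, and output files; the example above returns
+only containers. As a minimal sketch, assuming the tuple order (parameters, containers, files) implied by the example
+and a hypothetical `mixing_result` output parameter, a task could instead end with:
+
+.. code-block:: python
+
+ # "mixing_result" is a hypothetical output parameter; the tuple order
+ # (parameters, containers, files) follows the example above.
+ output_parameters = {"mixing_result": "success"}
+ return output_parameters, containers, None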
diff --git a/eos/__init__.py b/eos/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/eos/campaigns/__init__.py b/eos/campaigns/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/eos/campaigns/campaign_executor.py b/eos/campaigns/campaign_executor.py
new file mode 100644
index 0000000..34b460a
--- /dev/null
+++ b/eos/campaigns/campaign_executor.py
@@ -0,0 +1,332 @@
+import asyncio
+from typing import Any, TYPE_CHECKING
+
+import pandas as pd
+
+from eos.campaigns.campaign_manager import CampaignManager
+from eos.campaigns.campaign_optimizer_manager import CampaignOptimizerManager
+from eos.campaigns.entities.campaign import CampaignStatus, Campaign, CampaignExecutionParameters
+from eos.campaigns.exceptions import EosCampaignExecutionError
+from eos.experiments.entities.experiment import ExperimentStatus, ExperimentExecutionParameters
+from eos.experiments.exceptions import EosExperimentCancellationError, EosExperimentExecutionError
+from eos.experiments.experiment_executor_factory import ExperimentExecutorFactory
+from eos.logging.logger import log
+from eos.optimization.abstract_sequential_optimizer import AbstractSequentialOptimizer
+from eos.tasks.task_manager import TaskManager
+from eos.utils import dict_utils
+
+if TYPE_CHECKING:
+ from eos.experiments.experiment_executor import ExperimentExecutor
+
+
+class CampaignExecutor:
+ def __init__(
+ self,
+ campaign_id: str,
+ experiment_type: str,
+ execution_parameters: CampaignExecutionParameters,
+ campaign_manager: CampaignManager,
+ campaign_optimizer_manager: CampaignOptimizerManager,
+ task_manager: TaskManager,
+ experiment_executor_factory: ExperimentExecutorFactory,
+ ):
+ self._campaign_id = campaign_id
+ self._experiment_type = experiment_type
+ self._execution_parameters = execution_parameters
+ self._campaign_manager = campaign_manager
+ self._campaign_optimizer_manager = campaign_optimizer_manager
+ self._task_manager = task_manager
+ self._experiment_executor_factory = experiment_executor_factory
+
+ self._optimizer = None
+ self._optimizer_input_names: list[str] = []
+ self._optimizer_output_names: list[str] = []
+
+ self._experiment_executors: dict[str, ExperimentExecutor] = {}
+
+ self._campaign_status: CampaignStatus | None = None
+
+ def _setup_optimizer(self) -> None:
+ if self._optimizer:
+ return
+
+ self._optimizer = self._campaign_optimizer_manager.create_campaign_optimizer_actor(
+ self._experiment_type,
+ self._campaign_id,
+ self._execution_parameters.optimizer_computer_ip,
+ )
+ self._optimizer_input_names, self._optimizer_output_names = (
+ self._campaign_optimizer_manager.get_input_and_output_names(self._campaign_id)
+ )
+
+ def cleanup(self) -> None:
+ """
+ Clean up resources when the campaign executor is no longer needed.
+ """
+ if self._execution_parameters.do_optimization:
+ self._campaign_optimizer_manager.terminate_campaign_optimizer_actor(self._campaign_id)
+
+ async def start_campaign(self) -> None:
+ """
+ Start the campaign or handle an existing campaign.
+ """
+ campaign = self._campaign_manager.get_campaign(self._campaign_id)
+ if campaign:
+ await self._handle_existing_campaign(campaign)
+ else:
+ self._create_new_campaign()
+
+ self._campaign_manager.start_campaign(self._campaign_id)
+ self._campaign_status = CampaignStatus.RUNNING
+ log.info(f"Started campaign '{self._campaign_id}'.")
+
+ async def _handle_existing_campaign(self, campaign: Campaign) -> None:
+ """
+ Handle cases when the campaign already exists.
+ """
+ self._campaign_status = campaign.status
+
+ if not self._execution_parameters.resume:
+ def _raise_error(status: str) -> None:
+ raise EosCampaignExecutionError(
+ f"Cannot start campaign '{self._campaign_id}' as it already exists and is '{status}'. "
+ f"Please create a new campaign or re-submit with 'resume=True'."
+ )
+
+ status_handlers = {
+ CampaignStatus.COMPLETED: lambda: _raise_error("completed"),
+ CampaignStatus.SUSPENDED: lambda: _raise_error("suspended"),
+ CampaignStatus.CANCELLED: lambda: _raise_error("cancelled"),
+ CampaignStatus.FAILED: lambda: _raise_error("failed"),
+ }
+ status_handlers.get(self._campaign_status, lambda: None)()
+
+ await self._resume_campaign()
+
+ def _create_new_campaign(self) -> None:
+ """
+ Create a new campaign.
+ """
+ self._campaign_manager.create_campaign(
+ campaign_id=self._campaign_id,
+ experiment_type=self._experiment_type,
+ execution_parameters=self._execution_parameters,
+ )
+
+ if self._execution_parameters.do_optimization:
+ self._setup_optimizer()
+
+ async def _resume_campaign(self) -> None:
+ """
+ Resume an existing campaign.
+ """
+ self._campaign_manager.delete_current_campaign_experiments(self._campaign_id)
+
+ if self._execution_parameters.do_optimization:
+ self._setup_optimizer()
+ await self._restore_optimizer_state()
+
+ log.info(f"Campaign '{self._campaign_id}' resumed.")
+
+ async def _restore_optimizer_state(self) -> None:
+ """
+ Restore the optimizer state for a resumed campaign.
+ """
+ completed_experiment_ids = self._campaign_manager.get_campaign_experiment_ids(
+ self._campaign_id, status=ExperimentStatus.COMPLETED
+ )
+
+ inputs_df, outputs_df = await self._collect_experiment_results(completed_experiment_ids)
+
+ await self._optimizer.report.remote(inputs_df, outputs_df)
+
+ log.info(
+ f"CMP '{self._campaign_id}' - Restored optimizer state with {len(completed_experiment_ids)} "
+ f"completed experiments."
+ )
+
+ async def cancel_campaign(self) -> None:
+ """
+ Cancel the campaign and all running experiments.
+ """
+ campaign = self._campaign_manager.get_campaign(self._campaign_id)
+ if not campaign or campaign.status != CampaignStatus.RUNNING:
+ raise EosCampaignExecutionError(
+ f"Cannot cancel campaign '{self._campaign_id}' with status "
+ f"'{campaign.status if campaign else 'None'}'. It must be running."
+ )
+
+ log.warning(f"Cancelling campaign '{self._campaign_id}'...")
+ self._campaign_manager.cancel_campaign(self._campaign_id)
+ self._campaign_status = CampaignStatus.CANCELLED
+
+ await self._cancel_running_experiments()
+
+ log.warning(f"Cancelled campaign '{self._campaign_id}'.")
+
+ async def _cancel_running_experiments(self) -> None:
+ """
+ Cancel all running experiments in the campaign.
+ """
+ cancellation_tasks = [executor.cancel_experiment() for executor in self._experiment_executors.values()]
+ try:
+ await asyncio.wait_for(asyncio.gather(*cancellation_tasks, return_exceptions=True), timeout=30)
+ except asyncio.TimeoutError as e:
+ raise EosCampaignExecutionError(
+ f"CMP '{self._campaign_id}' - Timed out while cancelling experiments. "
+ f"Some experiments may still be running."
+ ) from e
+ except EosExperimentCancellationError as e:
+ raise EosCampaignExecutionError(
+ f"CMP '{self._campaign_id}' - Error cancelling experiments. Some experiments may still "
+ f"be running."
+ ) from e
+
+ async def progress_campaign(self) -> bool:
+ """
+ Progress the campaign by executing experiments.
+ Returns True if the campaign is completed, False otherwise.
+ """
+ try:
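+ # A campaign that is not running is only considered finished if it was cancelled.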
+ if self._campaign_status != CampaignStatus.RUNNING:
+ return self._campaign_status == CampaignStatus.CANCELLED
+
+ await self._progress_experiments()
+
+ campaign = self._campaign_manager.get_campaign(self._campaign_id)
+ if self._is_campaign_completed(campaign):
+ if self._execution_parameters.do_optimization:
+ await self._compute_pareto_solutions()
+ self._campaign_manager.complete_campaign(self._campaign_id)
+ return True
+
+ await self._create_experiments(campaign)
+
+ return False
+ except EosExperimentExecutionError as e:
+ self._campaign_manager.fail_campaign(self._campaign_id)
+ self._campaign_status = CampaignStatus.FAILED
+ raise EosCampaignExecutionError(f"Error executing campaign '{self._campaign_id}'") from e
+
+ async def _progress_experiments(self) -> None:
+ """
+ Progress all running experiments sequentially and process completed ones.
+ """
+ completed_experiments = []
+
+ for experiment_id, executor in self._experiment_executors.items():
+ is_completed = await executor.progress_experiment()
+ if is_completed:
+ completed_experiments.append(experiment_id)
+
+ if self._execution_parameters.do_optimization and completed_experiments:
+ await self._process_completed_experiments(completed_experiments)
+
+ for experiment_id in completed_experiments:
+ del self._experiment_executors[experiment_id]
+ self._campaign_manager.delete_campaign_experiment(self._campaign_id, experiment_id)
+ self._campaign_manager.increment_iteration(self._campaign_id)
+
+ async def _process_completed_experiments(self, completed_experiments: list[str]) -> None:
+ """
+ Process the results of completed experiments.
+ """
+ inputs_df, outputs_df = await self._collect_experiment_results(completed_experiments)
+ await self._optimizer.report.remote(inputs_df, outputs_df)
+ self._campaign_optimizer_manager.record_campaign_samples(
+ self._campaign_id, completed_experiments, inputs_df, outputs_df
+ )
+
+ async def _collect_experiment_results(self, experiment_ids: list[str]) -> tuple[pd.DataFrame, pd.DataFrame]:
+ """
+ Collect the results of completed experiments.
+ """
+ inputs = {input_name: [] for input_name in self._optimizer_input_names}
+ outputs = {output_name: [] for output_name in self._optimizer_output_names}
+
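+ # Optimizer input/output names follow the TASK.PARAMETER_NAME reference format.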
+ for experiment_id in experiment_ids:
+ for input_name in self._optimizer_input_names:
+ reference_task_id, parameter_name = input_name.split(".")
+ task = self._task_manager.get_task(experiment_id, reference_task_id)
+ inputs[input_name].append(float(task.input.parameters[parameter_name]))
+ for output_name in self._optimizer_output_names:
+ reference_task_id, parameter_name = output_name.split(".")
+ output_parameters = self._task_manager.get_task_output(experiment_id, reference_task_id).parameters
+ outputs[output_name].append(float(output_parameters[parameter_name]))
+
+ return pd.DataFrame(inputs), pd.DataFrame(outputs)
+
+ async def _create_experiments(self, campaign: Campaign) -> None:
+ """
+ Create new experiments if possible.
+ """
+ while self._can_create_more_experiments(campaign):
+ iteration = campaign.experiments_completed + len(self._experiment_executors)
+ new_experiment_id = f"{self._campaign_id}_exp_{iteration + 1}"
+
+ experiment_dynamic_parameters = await self._get_experiment_parameters(iteration)
+
+ experiment_execution_parameters = ExperimentExecutionParameters()
+ experiment_executor = self._experiment_executor_factory.create(
+ new_experiment_id, self._experiment_type, experiment_execution_parameters
+ )
+ self._campaign_manager.add_campaign_experiment(self._campaign_id, new_experiment_id)
+ self._experiment_executors[new_experiment_id] = experiment_executor
+ experiment_executor.start_experiment(experiment_dynamic_parameters)
+
+ async def _get_experiment_parameters(self, iteration: int) -> dict[str, Any]:
+ """
+ Get parameters for a new experiment.
+ """
+ campaign_dynamic_parameters = self._execution_parameters.dynamic_parameters
+
+ if campaign_dynamic_parameters and len(campaign_dynamic_parameters) > iteration:
+ return campaign_dynamic_parameters[iteration]
+ if self._execution_parameters.do_optimization:
+ log.info(f"CMP '{self._campaign_id}' - Sampling new parameters from the optimizer...")
+ new_parameters = await self._optimizer.sample.remote(1)
+ new_parameters = new_parameters.to_dict(orient="records")[0]
+ log.debug(f"CMP '{self._campaign_id}' - Sampled parameters: {new_parameters}")
+ return dict_utils.unflatten_dict(new_parameters)
+
+ raise EosCampaignExecutionError(
+ f"CMP '{self._campaign_id}' - No dynamic parameters provided for iteration {iteration}."
+ )
+
+ def _can_create_more_experiments(self, campaign: Campaign) -> bool:
+ """
+ Check if more experiments can be created.
+ """
+ num_executors = len(self._experiment_executors)
+ max_concurrent = self._execution_parameters.max_concurrent_experiments
+ max_total = self._execution_parameters.max_experiments
+ current_total = campaign.experiments_completed + num_executors
+
+ return num_executors < max_concurrent and (max_total == 0 or current_total < max_total)
+
+ def _is_campaign_completed(self, campaign: Campaign) -> bool:
+ """
+ Check if the campaign is completed.
+ """
+ max_experiments = self._execution_parameters.max_experiments
+ return (
+ max_experiments > 0
+ and campaign.experiments_completed >= max_experiments
+ and len(self._experiment_executors) == 0
+ )
+
+ async def _compute_pareto_solutions(self) -> None:
+ """
+ Compute and store Pareto solutions for the campaign.
+ """
+ log.info(f"Computing Pareto solutions for campaign '{self._campaign_id}'...")
+ try:
+ pareto_solutions_df = await self._optimizer.get_optimal_solutions.remote()
+ pareto_solutions = pareto_solutions_df.to_dict(orient="records")
+ self._campaign_manager.set_pareto_solutions(self._campaign_id, pareto_solutions)
+ except Exception as e:
+ raise EosCampaignExecutionError(f"CMP '{self._campaign_id}' - Error computing Pareto solutions.") from e
+
+ @property
+ def optimizer(self) -> AbstractSequentialOptimizer:
+ return self._optimizer
diff --git a/eos/campaigns/campaign_executor_factory.py b/eos/campaigns/campaign_executor_factory.py
new file mode 100644
index 0000000..d9be256
--- /dev/null
+++ b/eos/campaigns/campaign_executor_factory.py
@@ -0,0 +1,45 @@
+from eos.campaigns.campaign_executor import CampaignExecutor
+from eos.campaigns.campaign_manager import CampaignManager
+from eos.campaigns.campaign_optimizer_manager import CampaignOptimizerManager
+from eos.campaigns.entities.campaign import CampaignExecutionParameters
+from eos.configuration.configuration_manager import ConfigurationManager
+
+from eos.experiments.experiment_executor_factory import ExperimentExecutorFactory
+
+from eos.tasks.task_manager import TaskManager
+
+
+class CampaignExecutorFactory:
+ """
+ Factory class to create CampaignExecutor instances.
+ """
+
+ def __init__(
+ self,
+ configuration_manager: ConfigurationManager,
+ campaign_manager: CampaignManager,
+ campaign_optimizer_manager: CampaignOptimizerManager,
+ task_manager: TaskManager,
+ experiment_executor_factory: ExperimentExecutorFactory,
+ ):
+ self._configuration_manager = configuration_manager
+ self._campaign_manager = campaign_manager
+ self._campaign_optimizer_manager = campaign_optimizer_manager
+ self._task_manager = task_manager
+ self._experiment_executor_factory = experiment_executor_factory
+
+ def create(
+ self,
+ campaign_id: str,
+ experiment_type: str,
+ execution_parameters: CampaignExecutionParameters,
+ ) -> CampaignExecutor:
+ return CampaignExecutor(
+ campaign_id,
+ experiment_type,
+ execution_parameters,
+ self._campaign_manager,
+ self._campaign_optimizer_manager,
+ self._task_manager,
+ self._experiment_executor_factory,
+ )
diff --git a/eos/campaigns/campaign_manager.py b/eos/campaigns/campaign_manager.py
new file mode 100644
index 0000000..3cc076a
--- /dev/null
+++ b/eos/campaigns/campaign_manager.py
@@ -0,0 +1,178 @@
+from datetime import datetime, timezone
+from typing import Any
+
+from eos.campaigns.entities.campaign import Campaign, CampaignStatus, CampaignExecutionParameters
+from eos.campaigns.exceptions import EosCampaignStateError
+from eos.campaigns.repositories.campaign_repository import CampaignRepository
+from eos.configuration.configuration_manager import ConfigurationManager
+from eos.experiments.entities.experiment import ExperimentStatus
+from eos.experiments.repositories.experiment_repository import ExperimentRepository
+from eos.logging.logger import log
+from eos.persistence.db_manager import DbManager
+from eos.tasks.repositories.task_repository import TaskRepository
+
+
+class CampaignManager:
+ """
+ Responsible for managing the state of all experiment campaigns in EOS and tracking their execution.
+ """
+
+ def __init__(self, configuration_manager: ConfigurationManager, db_manager: DbManager):
+ self._configuration_manager = configuration_manager
+ self._campaigns = CampaignRepository("campaigns", db_manager)
+ self._campaigns.create_indices([("id", 1)], unique=True)
+ self._experiments = ExperimentRepository("experiments", db_manager)
+ self._tasks = TaskRepository("tasks", db_manager)
+
+ log.debug("Campaign manager initialized.")
+
+ def create_campaign(
+ self,
+ campaign_id: str,
+ experiment_type: str,
+ execution_parameters: CampaignExecutionParameters,
+ metadata: dict[str, Any] | None = None,
+ ) -> None:
+ """
+ Create a new campaign of a given experiment type with a unique id.
+
+ :param campaign_id: A unique id for the campaign.
+ :param experiment_type: The type of the experiment as defined in the configuration.
+ :param execution_parameters: Parameters for the execution of the campaign.
+ :param metadata: Additional metadata to be stored with the campaign.
+ """
+ if self._campaigns.get_one(id=campaign_id):
+ raise EosCampaignStateError(f"Campaign '{campaign_id}' already exists.")
+
+ experiment_config = self._configuration_manager.experiments.get(experiment_type)
+ if not experiment_config:
+ raise EosCampaignStateError(f"Experiment type '{experiment_type}' not found in the configuration.")
+
+ campaign = Campaign(
+ id=campaign_id,
+ experiment_type=experiment_type,
+ execution_parameters=execution_parameters,
+ metadata=metadata or {},
+ )
+ self._campaigns.create(campaign.model_dump())
+
+ log.info(f"Created campaign '{campaign_id}'.")
+
+ def delete_campaign(self, campaign_id: str) -> None:
+ """
+ Delete a campaign.
+ """
+ if not self._campaigns.exists(id=campaign_id):
+ raise EosCampaignStateError(f"Campaign '{campaign_id}' does not exist.")
+
+ self._campaigns.delete(id=campaign_id)
+
+ log.info(f"Deleted campaign '{campaign_id}'.")
+
+ def start_campaign(self, campaign_id: str) -> None:
+ """
+ Start a campaign.
+ """
+ self._set_campaign_status(campaign_id, CampaignStatus.RUNNING)
+
+ def complete_campaign(self, campaign_id: str) -> None:
+ """
+ Complete a campaign.
+ """
+ self._set_campaign_status(campaign_id, CampaignStatus.COMPLETED)
+
+ def cancel_campaign(self, campaign_id: str) -> None:
+ """
+ Cancel a campaign.
+ """
+ self._set_campaign_status(campaign_id, CampaignStatus.CANCELLED)
+
+ def suspend_campaign(self, campaign_id: str) -> None:
+ """
+ Suspend a campaign.
+ """
+ self._set_campaign_status(campaign_id, CampaignStatus.SUSPENDED)
+
+ def fail_campaign(self, campaign_id: str) -> None:
+ """
+ Fail a campaign.
+ """
+ self._set_campaign_status(campaign_id, CampaignStatus.FAILED)
+
+ def get_campaign(self, campaign_id: str) -> Campaign | None:
+ """
+ Get a campaign.
+ """
+ campaign = self._campaigns.get_one(id=campaign_id)
+ return Campaign(**campaign) if campaign else None
+
+ def get_campaigns(self, **query: Any) -> list[Campaign]:
+ """
+ Query campaigns with arbitrary parameters.
+
+ :param query: Dictionary of query parameters.
+ """
+ campaigns = self._campaigns.get_all(**query)
+ return [Campaign(**campaign) for campaign in campaigns]
+
+ def _set_campaign_status(self, campaign_id: str, new_status: CampaignStatus) -> None:
+ """
+ Set the status of a campaign.
+ """
+ update_fields = {"status": new_status.value}
+ if new_status == CampaignStatus.RUNNING:
+ update_fields["start_time"] = datetime.now(tz=timezone.utc)
+ elif new_status in [
+ CampaignStatus.COMPLETED,
+ CampaignStatus.CANCELLED,
+ CampaignStatus.FAILED,
+ ]:
+ update_fields["end_time"] = datetime.now(tz=timezone.utc)
+
+ self._campaigns.update(update_fields, id=campaign_id)
+
+ def increment_iteration(self, campaign_id: str) -> None:
+ """
+ Increment the iteration count of a campaign.
+ """
+ self._campaigns.increment_campaign_iteration(campaign_id)
+
+ def add_campaign_experiment(self, campaign_id: str, experiment_id: str) -> None:
+ """
+ Add an experiment to a campaign.
+ """
+ self._campaigns.add_current_experiment(campaign_id, experiment_id)
+
+ def delete_campaign_experiment(self, campaign_id: str, experiment_id: str) -> None:
+ """
+ Remove an experiment from a campaign.
+ """
+ self._campaigns.remove_current_experiment(campaign_id, experiment_id)
+
+ def delete_current_campaign_experiments(self, campaign_id: str) -> None:
+ """
+ Delete all current experiments from a campaign.
+ """
+ campaign = self.get_campaign(campaign_id)
+
+ for experiment_id in campaign.current_experiment_ids:
+ self._experiments.delete(id=experiment_id)
+ self._tasks.delete(experiment_id=experiment_id)
+
+ self._campaigns.clear_current_experiments(campaign_id)
+
+ def get_campaign_experiment_ids(self, campaign_id: str, status: ExperimentStatus | None = None) -> list[str]:
+ """
+ Get all experiment IDs of a campaign with an optional status filter.
+
+ :param campaign_id: The ID of the campaign.
+ :param status: Optional status to filter experiments.
+ :return: A list of experiment IDs.
+ """
+ return self._experiments.get_experiment_ids_by_campaign(campaign_id, status)
+
+ def set_pareto_solutions(self, campaign_id: str, pareto_solutions: list[dict[str, Any]]) -> None:
+ """
+ Set the Pareto solutions for a campaign.
+ """
+ self._campaigns.update({"pareto_solutions": pareto_solutions}, id=campaign_id)
diff --git a/eos/campaigns/campaign_optimizer_manager.py b/eos/campaigns/campaign_optimizer_manager.py
new file mode 100644
index 0000000..d3c19c9
--- /dev/null
+++ b/eos/campaigns/campaign_optimizer_manager.py
@@ -0,0 +1,123 @@
+import pandas as pd
+import ray
+from ray.actor import ActorHandle
+
+from eos.campaigns.entities.campaign import CampaignSample
+from eos.configuration.plugin_registries.campaign_optimizer_plugin_registry import CampaignOptimizerPluginRegistry
+from eos.logging.logger import log
+from eos.optimization.sequential_optimizer_actor import SequentialOptimizerActor
+from eos.persistence.db_manager import DbManager
+from eos.persistence.mongo_repository import MongoRepository
+
+
+class CampaignOptimizerManager:
+ """
+ Responsible for managing the optimizers associated with experiment campaigns.
+ """
+
+ def __init__(self, db_manager: DbManager):
+ self._campaign_samples = MongoRepository("campaign_samples", db_manager)
+ self._campaign_samples.create_indices([("campaign_id", 1), ("experiment_id", 1)], unique=True)
+
+ self._campaign_optimizer_plugin_registry = CampaignOptimizerPluginRegistry()
+
+ self._optimizer_actors: dict[str, ActorHandle] = {}
+
+ log.debug("Campaign optimizer manager initialized.")
+
+ def create_campaign_optimizer_actor(self, experiment_type: str, campaign_id: str, computer_ip: str) -> ActorHandle:
+ """
+ Create a new campaign optimizer Ray actor.
+
+ :param experiment_type: The type of the experiment.
+ :param campaign_id: The ID of the campaign.
+ :param computer_ip: The IP address of the optimizer computer on which the actor will run.
+ """
+ constructor_args, optimizer_type = (
+ self._campaign_optimizer_plugin_registry.get_campaign_optimizer_creation_parameters(experiment_type)
+ )
+
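+ # Place the actor via Ray custom resources: on the EOS core node for localhost, otherwise on the node with the given IP.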
+ resources = {"eos-core": 0.01} if computer_ip in ["localhost", "127.0.0.1"] else {f"node:{computer_ip}": 0.01}
+
+ optimizer_actor = SequentialOptimizerActor.options(name=f"{campaign_id}_optimizer", resources=resources).remote(
+ constructor_args, optimizer_type
+ )
+
+ self._optimizer_actors[campaign_id] = optimizer_actor
+
+ return optimizer_actor
+
+ def terminate_campaign_optimizer_actor(self, campaign_id: str) -> None:
+ """
+ Terminate the Ray actor associated with the optimizer for a campaign.
+
+ :param campaign_id: The ID of the campaign.
+ """
+ optimizer_actor = self._optimizer_actors.pop(campaign_id, None)
+
+ if optimizer_actor is not None:
+ ray.kill(optimizer_actor)
+
+ def get_campaign_optimizer_actor(self, campaign_id: str) -> ActorHandle:
+ """
+ Get an existing Ray actor associated with the optimizer for a campaign.
+
+ :param campaign_id: The ID of the campaign.
+ :return: The Ray actor associated with the optimizer.
+ """
+ return self._optimizer_actors[campaign_id]
+
+ def get_input_and_output_names(self, campaign_id: str) -> tuple[list[str], list[str]]:
+ """
+ Get the input and output names from an optimizer associated with a campaign.
+
+ :param campaign_id: The ID of the campaign associated with the optimizer.
+ :return: A tuple containing the input and output names.
+ """
+ optimizer_actor = self._optimizer_actors[campaign_id]
+
+ input_names, output_names = ray.get(
+ [optimizer_actor.get_input_names.remote(), optimizer_actor.get_output_names.remote()]
+ )
+
+ return input_names, output_names
+
+ def record_campaign_samples(
+ self,
+ campaign_id: str,
+ experiment_ids: list[str],
+ inputs: pd.DataFrame,
+ outputs: pd.DataFrame,
+ ) -> None:
+ """
+ Record one or more campaign samples (experiment results) for the given campaign.
+ Each sample is a data point for the optimizer to learn from.
+
+ :param campaign_id: The ID of the campaign.
+ :param experiment_ids: The IDs of the experiments.
+ :param inputs: The input data.
+ :param outputs: The output data.
+ """
+ inputs_dict = inputs.to_dict(orient="records")
+ outputs_dict = outputs.to_dict(orient="records")
+
+ campaign_samples = [
+ CampaignSample(
+ campaign_id=campaign_id,
+ experiment_id=experiment_id,
+ inputs=inputs_dict[i],
+ outputs=outputs_dict[i],
+ )
+ for i, experiment_id in enumerate(experiment_ids)
+ ]
+
+ for campaign_sample in campaign_samples:
+ self._campaign_samples.create(campaign_sample.model_dump())
+
+ def delete_campaign_samples(self, campaign_id: str) -> None:
+ """
+ Delete all campaign samples for a campaign.
+
+ :param campaign_id: The ID of the campaign.
+ """
+ self._campaign_samples.delete(campaign_id=campaign_id)
diff --git a/eos/campaigns/entities/__init__.py b/eos/campaigns/entities/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/eos/campaigns/entities/campaign.py b/eos/campaigns/entities/campaign.py
new file mode 100644
index 0000000..2eda803
--- /dev/null
+++ b/eos/campaigns/entities/campaign.py
@@ -0,0 +1,71 @@
+from datetime import datetime, timezone
+from enum import Enum
+from typing import Any
+
+from pydantic import BaseModel, field_serializer, Field, model_validator
+
+
+class CampaignExecutionParameters(BaseModel):
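+ # A value of 0 for max_experiments means no limit on the total number of experiments.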
+ max_experiments: int = Field(0, ge=0)
+ max_concurrent_experiments: int = Field(1, ge=1)
+
+ do_optimization: bool
+ optimizer_computer_ip: str = "127.0.0.1"
+ dynamic_parameters: list[dict[str, dict[str, Any]]] | None = None
+
+ resume: bool = False
+
+ @model_validator(mode="after")
+ def validate_dynamic_parameters(self) -> "CampaignExecutionParameters":
+ if not self.do_optimization:
+ if not self.dynamic_parameters:
+ raise ValueError("Campaign dynamic parameters must be provided if optimization is not enabled.")
+ if len(self.dynamic_parameters) != self.max_experiments:
+ raise ValueError(
+ "Dynamic parameters must be provided for all experiments up to the max experiments if "
+ "optimization is not enabled."
+ )
+ return self
+
+
+class CampaignStatus(Enum):
+ CREATED = "CREATED"
+ RUNNING = "RUNNING"
+ COMPLETED = "COMPLETED"
+ SUSPENDED = "SUSPENDED"
+ CANCELLED = "CANCELLED"
+ FAILED = "FAILED"
+
+
+class Campaign(BaseModel):
+ id: str
+ experiment_type: str
+
+ execution_parameters: CampaignExecutionParameters
+
+ status: CampaignStatus = CampaignStatus.CREATED
+ experiments_completed: int = Field(0, ge=0)
+ current_experiment_ids: list[str] = []
+
+ pareto_solutions: list[dict[str, Any]] | None = None
+
+ metadata: dict[str, Any] = {}
+
+ start_time: datetime | None = None
+ end_time: datetime | None = None
+
+ created_at: datetime = Field(default_factory=lambda: datetime.now(tz=timezone.utc))
+
+ @field_serializer("status")
+ def status_enum_to_string(self, v: CampaignStatus) -> str:
+ return v.value
+
+
+class CampaignSample(BaseModel):
+ campaign_id: str
+ experiment_id: str
+
+ inputs: dict[str, Any]
+ outputs: dict[str, Any]
+
+ created_at: datetime = Field(default_factory=lambda: datetime.now(tz=timezone.utc))
diff --git a/eos/campaigns/exceptions.py b/eos/campaigns/exceptions.py
new file mode 100644
index 0000000..861b624
--- /dev/null
+++ b/eos/campaigns/exceptions.py
@@ -0,0 +1,10 @@
+class EosCampaignError(Exception):
+ pass
+
+
+class EosCampaignStateError(EosCampaignError):
+ pass
+
+
+class EosCampaignExecutionError(EosCampaignError):
+ pass
diff --git a/eos/campaigns/repositories/__init__.py b/eos/campaigns/repositories/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/eos/campaigns/repositories/campaign_repository.py b/eos/campaigns/repositories/campaign_repository.py
new file mode 100644
index 0000000..f7acb9a
--- /dev/null
+++ b/eos/campaigns/repositories/campaign_repository.py
@@ -0,0 +1,30 @@
+from eos.campaigns.exceptions import EosCampaignStateError
+from eos.persistence.mongo_repository import MongoRepository
+
+
+class CampaignRepository(MongoRepository):
+ def increment_campaign_iteration(self, campaign_id: str) -> None:
+ result = self._collection.update_one({"id": campaign_id}, {"$inc": {"experiments_completed": 1}})
+
+ if result.matched_count == 0:
+ raise EosCampaignStateError(
+ f"Cannot increment the iteration of campaign '{campaign_id}' as it does not exist."
+ )
+
+ def add_current_experiment(self, campaign_id: str, experiment_id: str) -> None:
+ self._collection.update_one(
+ {"id": campaign_id},
+ {"$addToSet": {"current_experiment_ids": experiment_id}},
+ )
+
+ def remove_current_experiment(self, campaign_id: str, experiment_id: str) -> None:
+ self._collection.update_one(
+ {"id": campaign_id},
+ {"$pull": {"current_experiment_ids": experiment_id}},
+ )
+
+ def clear_current_experiments(self, campaign_id: str) -> None:
+ self._collection.update_one(
+ {"id": campaign_id},
+ {"$set": {"current_experiment_ids": []}},
+ )
diff --git a/eos/cli/__init__.py b/eos/cli/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/eos/cli/orchestrator_cli.py b/eos/cli/orchestrator_cli.py
new file mode 100644
index 0000000..095e4ab
--- /dev/null
+++ b/eos/cli/orchestrator_cli.py
@@ -0,0 +1,185 @@
+import asyncio
+import contextlib
+import functools
+import os
+import signal
+from collections.abc import AsyncIterator
+from pathlib import Path
+from typing import Annotated
+
+import typer
+import uvicorn
+from litestar import Litestar, Router
+from litestar.di import Provide
+from litestar.logging import LoggingConfig
+from omegaconf import OmegaConf, DictConfig
+
+from eos.logging.logger import log, LogLevel
+from eos.orchestration.orchestrator import Orchestrator
+from eos.persistence.service_credentials import ServiceCredentials
+from eos.web_api.orchestrator.controllers.campaign_controller import CampaignController
+from eos.web_api.orchestrator.controllers.experiment_controller import ExperimentController
+from eos.web_api.orchestrator.controllers.file_controller import FileController
+from eos.web_api.orchestrator.controllers.lab_controller import LabController
+from eos.web_api.orchestrator.controllers.task_controller import TaskController
+from eos.web_api.orchestrator.exception_handling import global_exception_handler
+
+default_config = {
+ "user_dir": "./user",
+ "labs": [],
+ "experiments": [],
+ "log_level": "INFO",
+ "web_api": {
+ "host": "localhost",
+ "port": 8070,
+ },
+ "db": {
+ "host": "localhost",
+ "port": 27017,
+ "username": None,
+ "password": None,
+ },
+ "file_db": {
+ "host": "localhost",
+ "port": 9004,
+ "username": None,
+ "password": None,
+ },
+}
+
+eos_banner = r"""The Experiment Orchestration System
+ ▄▄▄▄▄▄▄▄▄▄▄ ▄▄▄▄▄▄▄▄▄▄▄ ▄▄▄▄▄▄▄▄▄▄▄
+▐░░░░░░░░░░░▌▐░░░░░░░░░░░▌▐░░░░░░░░░░░▌
+▐░█▀▀▀▀▀▀▀▀▀ ▐░█▀▀▀▀▀▀▀█░▌▐░█▀▀▀▀▀▀▀▀▀
+▐░█▄▄▄▄▄▄▄▄▄ ▐░▌ ▐░▌▐░█▄▄▄▄▄▄▄▄▄
+▐░░░░░░░░░░░▌▐░▌ ▐░▌▐░░░░░░░░░░░▌
+▐░█▀▀▀▀▀▀▀▀▀ ▐░▌ ▐░▌ ▀▀▀▀▀▀▀▀▀█░▌
+▐░█▄▄▄▄▄▄▄▄▄ ▐░█▄▄▄▄▄▄▄█░▌ ▄▄▄▄▄▄▄▄▄█░▌
+▐░░░░░░░░░░░▌▐░░░░░░░░░░░▌▐░░░░░░░░░░░▌
+ ▀▀▀▀▀▀▀▀▀▀▀ ▀▀▀▀▀▀▀▀▀▀▀ ▀▀▀▀▀▀▀▀▀▀▀
+"""
+
+
+def load_config(config_file: str) -> DictConfig:
+ if not Path(config_file).exists():
+ raise FileNotFoundError(f"Config file '{config_file}' does not exist")
+ return OmegaConf.merge(OmegaConf.create(default_config), OmegaConf.load(config_file))
+
+
+def parse_list_arg(arg: str | None) -> list[str]:
+ return [item.strip() for item in arg.split(",")] if arg else []
+
+
+@contextlib.asynccontextmanager
+async def handle_shutdown(
+ orchestrator: Orchestrator, web_api_server: uvicorn.Server
+) -> AsyncIterator[None]:
+ class GracefulExit(SystemExit):
+ pass
+
+ loop = asyncio.get_running_loop()
+ shutdown_initiated = False
+
+ def signal_handler(*_) -> None:
+ nonlocal shutdown_initiated
+ if not shutdown_initiated:
+ log.warning("Shut down signal received.")
+ shutdown_initiated = True
+ raise GracefulExit()
+
+ for sig in (signal.SIGINT, signal.SIGTERM):
+ loop.add_signal_handler(sig, functools.partial(signal_handler))
+
+ try:
+ yield
+ except GracefulExit:
+ pass
+ finally:
+ log.info("Shutting down the internal web API server...")
+ web_api_server.should_exit = True
+ await web_api_server.shutdown()
+
+ log.info("Shutting down the orchestrator...")
+ orchestrator.terminate()
+
+ log.info("Shutdown complete.")
+
+
+async def run_all(orchestrator: Orchestrator, web_api_server: uvicorn.Server) -> None:
+ async with handle_shutdown(orchestrator, web_api_server):
+ orchestrator_task = asyncio.create_task(orchestrator.spin())
+ web_server_task = asyncio.create_task(web_api_server.serve())
+
+ await asyncio.gather(orchestrator_task, web_server_task)
+
+
+def start_orchestrator(
+ config_file: Annotated[
+ str, typer.Option("--config", "-c", help="Path to the EOS configuration file")
+ ] = "./config.yml",
+ user_dir: (
+ Annotated[str, typer.Option("--user-dir", "-u", help="The directory containing EOS user configurations")] | None
+ ) = None,
+ labs: (
+ Annotated[str, typer.Option("--labs", "-l", help="Comma-separated list of lab configurations to load")] | None
+ ) = None,
+ experiments: (
+ Annotated[
+ str,
+ typer.Option("--experiments", "-e", help="Comma-separated list of experiment configurations to load"),
+ ]
+ | None
+ ) = None,
+ log_level: Annotated[LogLevel | None, typer.Option("--log-level", "-v", help="Logging level")] = None,
+) -> None:
+
+ typer.echo(eos_banner)
+
+ file_config = load_config(config_file)
+ cli_config = {}
+ if user_dir is not None:
+ cli_config["user_dir"] = user_dir
+ if labs is not None:
+ cli_config["labs"] = parse_list_arg(labs)
+ if experiments is not None:
+ cli_config["experiments"] = parse_list_arg(experiments)
+ if log_level is not None:
+ cli_config["log_level"] = log_level.value
+ config = OmegaConf.merge(file_config, OmegaConf.create(cli_config))
+
+ log.set_level(config.log_level)
+
+ # Set up the orchestrator
+ db_credentials = ServiceCredentials(**config.db)
+ file_db_credentials = ServiceCredentials(**config.file_db)
+ orchestrator = Orchestrator(config.user_dir, db_credentials, file_db_credentials)
+ orchestrator.load_labs(config.labs)
+ orchestrator.load_experiments(config.experiments)
+
+ # Set up the web API server
+ logging_config = LoggingConfig(
+ configure_root_logger=False,
+ loggers={
+ "litestar": {"level": "CRITICAL"},
+ },
+ )
+ os.environ["LITESTAR_WARN_IMPLICIT_SYNC_TO_THREAD"] = "0"
+
+ def orchestrator_provider() -> Orchestrator:
+ return orchestrator
+
+ api_router = Router(
+ path="/api",
+ route_handlers=[TaskController, ExperimentController, CampaignController, LabController, FileController],
+ dependencies={"orchestrator": Provide(orchestrator_provider)},
+ exception_handlers={Exception: global_exception_handler},
+ )
+ web_api_app = Litestar(
+ route_handlers=[api_router],
+ logging_config=logging_config,
+ exception_handlers={Exception: global_exception_handler},
+ )
+ uvicorn_config = uvicorn.Config(web_api_app, host=config.web_api.host, port=config.web_api.port, log_level="critical")
+ web_api_server = uvicorn.Server(uvicorn_config)
+
+ asyncio.run(run_all(orchestrator, web_api_server))
diff --git a/eos/cli/pkg_cli.py b/eos/cli/pkg_cli.py
new file mode 100644
index 0000000..50bff34
--- /dev/null
+++ b/eos/cli/pkg_cli.py
@@ -0,0 +1,28 @@
+from pathlib import Path
+from typing import Annotated
+
+import typer
+
+pkg_app = typer.Typer()
+
+
+@pkg_app.command(name="create")
+def create_package(
+ name: Annotated[str, typer.Argument(help="Name of the package to create")],
+ user_dir: Annotated[
+ str, typer.Option("--user-dir", "-u", help="The directory containing EOS user configurations")
+ ] = "./user",
+) -> None:
+ """Create a new package with the specified name in the user directory."""
+ package_dir = Path(user_dir) / name
+ subdirs = ["common", "devices", "tasks", "labs", "experiments"]
+
+ try:
+ package_dir.mkdir(parents=True, exist_ok=False)
+ for subdir in subdirs:
+ (package_dir / subdir).mkdir()
+ typer.echo(f"Successfully created package '{name}' in {package_dir}")
+ except FileExistsError:
+ typer.echo(f"Error: Package '{name}' already exists in {user_dir}", err=True)
+ except Exception as e:
+ typer.echo(f"Error creating package: {e!s}", err=True)
diff --git a/eos/cli/web_api_cli.py b/eos/cli/web_api_cli.py
new file mode 100644
index 0000000..99f108e
--- /dev/null
+++ b/eos/cli/web_api_cli.py
@@ -0,0 +1,30 @@
+import os
+import subprocess
+import sys
+from typing import Annotated
+
+import typer
+
+
+# ruff: noqa: S603
+
+
+def start_web_api(
+ host: Annotated[str, typer.Option("--host", help="Host for the EOS web API server")] = "0.0.0.0",
+ port: Annotated[int, typer.Option("--port", help="Port for the EOS web API server")] = 8000,
+ orchestrator_host: Annotated[
+ str, typer.Option("--orchestrator-host", help="Host for the EOS orchestrator server")
+ ] = "localhost",
+ orchestrator_port: Annotated[
+ int, typer.Option("--orchestrator-port", help="Port for the EOS orchestrator server")
+ ] = 8070,
+) -> None:
+ env = os.environ.copy()
+ env["EOS_ORCHESTRATOR_HOST"] = str(orchestrator_host)
+ env["EOS_ORCHESTRATOR_PORT"] = str(orchestrator_port)
+
+ subprocess.run(
+ [sys.executable, "-m", "uvicorn", "--host", str(host), "--port", str(port), "eos.web_api.public.server:app"],
+ env=env,
+ check=True,
+ )
diff --git a/eos/configuration/__init__.py b/eos/configuration/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/eos/configuration/configuration_manager.py b/eos/configuration/configuration_manager.py
new file mode 100644
index 0000000..af3688f
--- /dev/null
+++ b/eos/configuration/configuration_manager.py
@@ -0,0 +1,230 @@
+import os
+from typing import TYPE_CHECKING
+
+
+from eos.configuration.exceptions import (
+ EosConfigurationError,
+)
+from eos.configuration.package_manager import PackageManager
+from eos.configuration.plugin_registries.campaign_optimizer_plugin_registry import CampaignOptimizerPluginRegistry
+from eos.configuration.plugin_registries.device_plugin_registry import DevicePluginRegistry
+from eos.configuration.plugin_registries.task_plugin_registry import TaskPluginRegistry
+from eos.configuration.spec_registries.device_specification_registry import DeviceSpecificationRegistry
+from eos.configuration.spec_registries.task_specification_registry import (
+ TaskSpecificationRegistry,
+)
+from eos.configuration.validation.experiment_validator import ExperimentValidator
+from eos.configuration.validation.lab_validator import LabValidator
+from eos.configuration.validation.multi_lab_validator import MultiLabValidator
+from eos.logging.logger import log
+
+if TYPE_CHECKING:
+ from eos.configuration.entities.lab import LabConfig
+ from eos.configuration.entities.experiment import ExperimentConfig
+
+
+class ConfigurationManager:
+ """
+ The configuration manager is responsible for the data-driven configuration layer of EOS.
+ It allows loading and managing configurations for labs, experiments, tasks, and devices.
+ It also invokes the validation of the loaded configurations.
+ """
+
+ def __init__(self, user_dir: str):
+ self._user_dir = user_dir
+ self._package_manager = PackageManager(user_dir)
+
+ self.labs: dict[str, LabConfig] = {}
+ self.experiments: dict[str, ExperimentConfig] = {}
+
+ task_configs, task_dirs_to_task_types = self._package_manager.read_task_configs()
+ self.task_specs = TaskSpecificationRegistry(task_configs, task_dirs_to_task_types)
+ self.tasks = TaskPluginRegistry(self._package_manager)
+
+ device_configs, device_dirs_to_device_types = self._package_manager.read_device_configs()
+ self.device_specs = DeviceSpecificationRegistry(device_configs, device_dirs_to_device_types)
+ self.devices = DevicePluginRegistry(self._package_manager)
+
+ self.campaign_optimizers = CampaignOptimizerPluginRegistry(self._package_manager)
+
+ log.debug("Configuration manager initialized")
+
+ def get_lab_loaded_statuses(self) -> dict[str, bool]:
+ """
+ Returns a dictionary where the lab type (name of directory) is associated
+ with a boolean value indicating if it's currently loaded.
+ """
+ all_labs = set()
+
+ for package in self._package_manager.get_all_packages():
+ labs_dir = package.labs_dir
+ if labs_dir.is_dir():
+ package_labs = [d for d in os.listdir(labs_dir) if (labs_dir / d).is_dir()]
+ all_labs.update(package_labs)
+
+ return {lab: lab in self.labs for lab in all_labs}
+
+ def load_lab(self, lab_type: str, validate_multi_lab: bool = True) -> None:
+ """
+ Load a new laboratory to the configuration manager.
+
+ :param lab_type: The type of the lab. This should match the name of the lab's directory in the
+ user directory.
+ :param validate_multi_lab: Whether to validate the multi-lab configuration after adding the lab.
+ """
+ lab_config = self._package_manager.read_lab_config(lab_type)
+
+ lab_validator = LabValidator(self._user_dir, lab_config)
+ lab_validator.validate()
+
+ self.labs[lab_type] = lab_config
+
+ if validate_multi_lab:
+ multi_lab_validator = MultiLabValidator(list(self.labs.values()))
+ multi_lab_validator.validate()
+
+ log.info(f"Loaded lab '{lab_type}'")
+ log.debug(f"Lab configuration: {lab_config}")
+
+ def load_labs(self, lab_types: set[str]) -> None:
+ """
+ Load multiple laboratories to the configuration manager.
+
+ :param lab_types: A list of lab types (names). Each type should match the name of the lab's directory in the
+ user directory.
+ """
+ for lab_name in lab_types:
+ self.load_lab(lab_name, validate_multi_lab=False)
+
+ multi_lab_validator = MultiLabValidator(list(self.labs.values()))
+ multi_lab_validator.validate()
+
+ def unload_labs(self, lab_types: set[str]) -> None:
+ """
+ Unload multiple labs from the configuration manager. Also unloads all experiments associated with the labs.
+
+ :param lab_types: A list of lab types (names) to remove.
+ """
+ for lab_type in lab_types:
+ self.unload_lab(lab_type)
+
+ def unload_lab(self, lab_type: str) -> None:
+ """
+ Unload a lab from the configuration manager. Also unloads all experiments associated with the lab.
+
+ :param lab_type: The type (name) of the lab to remove.
+ """
+ if lab_type not in self.labs:
+ raise EosConfigurationError(
+ f"Lab '{lab_type}' that was requested to be unloaded does not exist in the configuration manager"
+ )
+
+ self._unload_experiments_associated_with_labs({lab_type})
+
+ self.labs.pop(lab_type)
+ log.info(f"Unloaded lab '{lab_type}'")
+
+ def get_experiment_loaded_statuses(self) -> dict[str, bool]:
+ """
+ Returns a dictionary where the experiment type (name of directory) is associated
+ with a boolean value indicating if it's currently loaded.
+ """
+ all_experiments = set()
+
+ for package in self._package_manager.get_all_packages():
+ experiments_dir = package.experiments_dir
+ if experiments_dir.is_dir():
+ package_experiments = [d for d in os.listdir(experiments_dir) if (experiments_dir / d).is_dir()]
+ all_experiments.update(package_experiments)
+
+ return {exp: exp in self.experiments for exp in all_experiments}
+
+ def load_experiment(self, experiment_type: str) -> None:
+ """
+ Load a new experiment, making it available for execution.
+
+ :param experiment_type: The name of the experiment. This should match the name of the experiment
+ configuration file in the lab's directory.
+ """
+ if experiment_type in self.experiments:
+ raise EosConfigurationError(
+ f"Experiment '{experiment_type}' that was requested to be loaded is already loaded."
+ )
+
+ try:
+ experiment_config = self._package_manager.read_experiment_config(experiment_type)
+
+ experiment_validator = ExperimentValidator(experiment_config, list(self.labs.values()))
+ experiment_validator.validate()
+
+ self.campaign_optimizers.load_campaign_optimizer(experiment_type)
+ self.experiments[experiment_type] = experiment_config
+
+ log.info(f"Loaded experiment '{experiment_type}'")
+ log.debug(f"Experiment configuration: {experiment_config}")
+ except Exception:
+ self._cleanup_experiment_resources(experiment_type)
+ raise
+
+ def unload_experiment(self, experiment_name: str) -> None:
+ """
+ Unload an experiment from the configuration manager.
+
+ :param experiment_name: The name of the experiment to remove.
+ """
+ if experiment_name not in self.experiments:
+ raise EosConfigurationError(
+ f"Experiment '{experiment_name}' that was requested to be unloaded is not loaded."
+ )
+
+ self._cleanup_experiment_resources(experiment_name)
+ self.experiments.pop(experiment_name)
+ log.info(f"Unloaded experiment '{experiment_name}'")
+
+ def load_experiments(self, experiment_types: set[str]) -> None:
+ """
+ Load multiple experiments into the configuration manager.
+
+ :param experiment_types: A set of experiment names. Each name should match the name of the
+ experiment's directory inside a package's experiments directory.
+ """
+ for experiment_type in experiment_types:
+ self.load_experiment(experiment_type)
+
+ def unload_experiments(self, experiment_types: set[str]) -> None:
+ """
+ Unload multiple experiments from the configuration manager.
+
+ :param experiment_types: A set of experiment names to remove.
+ """
+ for experiment_type in experiment_types:
+ self.unload_experiment(experiment_type)
+
+ def _cleanup_experiment_resources(self, experiment_name: str) -> None:
+ """
+ Clean up resources associated with an experiment.
+
+ :param experiment_name: The name of the experiment to clean up.
+ """
+ try:
+ self.campaign_optimizers.unload_campaign_optimizer(experiment_name)
+ except Exception as e:
+ raise EosConfigurationError(
+ f"Error unloading campaign optimizer for experiment '{experiment_name}': {e!s}"
+ ) from e
+
+ def _unload_experiments_associated_with_labs(self, lab_names: set[str]) -> None:
+ """
+ Unload all experiments associated with a set of labs from the configuration manager.
+
+ :param lab_names: A set of lab names.
+ """
+ # Collect into a set so an experiment associated with multiple labs is unloaded only once.
+ experiments_to_remove = {
+ experiment_name
+ for experiment_name, experiment_config in self.experiments.items()
+ if any(lab_name in experiment_config.labs for lab_name in lab_names)
+ }
+
+ for experiment_name in experiments_to_remove:
+ self.unload_experiment(experiment_name)
+ log.info(f"Unloaded experiment '{experiment_name}' as it was associated with lab(s) {lab_names}")
diff --git a/eos/configuration/constants.py b/eos/configuration/constants.py
new file mode 100644
index 0000000..6dc6ccd
--- /dev/null
+++ b/eos/configuration/constants.py
@@ -0,0 +1,18 @@
+LABS_DIR = "labs"
+EXPERIMENTS_DIR = "experiments"
+TASKS_DIR = "tasks"
+DEVICES_DIR = "devices"
+COMMON_DIR = "common"
+
+EXPERIMENT_CONFIG_FILE_NAME = "experiment.yml"
+LAB_CONFIG_FILE_NAME = "lab.yml"
+DEVICE_CONFIG_FILE_NAME = "device.yml"
+TASK_CONFIG_FILE_NAME = "task.yml"
+
+DEVICE_IMPLEMENTATION_FILE_NAME = "device.py"
+TASK_IMPLEMENTATION_FILE_NAME = "task.py"
+
+CAMPAIGN_OPTIMIZER_FILE_NAME = "optimizer.py"
+CAMPAIGN_OPTIMIZER_CREATION_FUNCTION_NAME = "eos_create_campaign_optimizer"
+
+EOS_COMPUTER_NAME = "eos_computer"
diff --git a/eos/configuration/entities/__init__.py b/eos/configuration/entities/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/eos/configuration/entities/device_specification.py b/eos/configuration/entities/device_specification.py
new file mode 100644
index 0000000..612ceb4
--- /dev/null
+++ b/eos/configuration/entities/device_specification.py
@@ -0,0 +1,9 @@
+from dataclasses import dataclass
+from typing import Any
+
+
+@dataclass
+class DeviceSpecification:
+ type: str
+ description: str | None = None
+ initialization_parameters: dict[str, Any] | None = None
diff --git a/eos/configuration/entities/experiment.py b/eos/configuration/entities/experiment.py
new file mode 100644
index 0000000..48fbfc8
--- /dev/null
+++ b/eos/configuration/entities/experiment.py
@@ -0,0 +1,21 @@
+from dataclasses import dataclass
+from typing import Any
+
+from eos.configuration.entities.task import TaskConfig
+
+
+@dataclass
+class ExperimentContainerConfig:
+ id: str
+ description: str | None = None
+ metadata: dict[str, Any] | None = None
+ tags: list[str] | None = None
+
+
+@dataclass
+class ExperimentConfig:
+ type: str
+ description: str
+ labs: list[str]
+ tasks: list[TaskConfig]
+ containers: list[ExperimentContainerConfig] | None = None
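+
+
+# Illustrative sketch: a minimal ExperimentConfig built in code from the fields
+# defined above (names and values are hypothetical):
+#
+# config = ExperimentConfig(
+#     type="water_purification",
+#     description="Two-step purification experiment",
+#     labs=["small_lab"],
+#     tasks=[
+#         TaskConfig(id="mix", type="magnetic_mixing"),
+#         TaskConfig(id="filter", type="filtration", dependencies=["mix"]),
+#     ],
+# )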
diff --git a/eos/configuration/entities/lab.py b/eos/configuration/entities/lab.py
new file mode 100644
index 0000000..8ebefec
--- /dev/null
+++ b/eos/configuration/entities/lab.py
@@ -0,0 +1,42 @@
+from dataclasses import dataclass, field
+from typing import Any
+
+
+@dataclass
+class Location:
+ description: str
+ metadata: dict[str, Any] | None = None
+
+
+@dataclass
+class LabComputerConfig:
+ ip: str
+ description: str | None = None
+
+
+@dataclass
+class LabDeviceConfig:
+ type: str
+ computer: str
+ location: str | None = None
+ description: str | None = None
+ initialization_parameters: dict[str, Any] | None = None
+
+
+@dataclass
+class LabContainerConfig:
+ type: str
+ location: str
+ ids: list[str]
+ description: str | None = None
+ metadata: dict[str, Any] | None = None
+
+
+@dataclass
+class LabConfig:
+ type: str
+ description: str
+ devices: dict[str, LabDeviceConfig]
+ locations: dict[str, Location] = field(default_factory=dict)
+ computers: dict[str, LabComputerConfig] = field(default_factory=dict)
+ containers: list[LabContainerConfig] = field(default_factory=list)
diff --git a/eos/configuration/entities/parameters.py b/eos/configuration/entities/parameters.py
new file mode 100644
index 0000000..9c31b7c
--- /dev/null
+++ b/eos/configuration/entities/parameters.py
@@ -0,0 +1,209 @@
+from dataclasses import dataclass
+from enum import Enum
+from typing import Any, ClassVar
+
+from omegaconf import ListConfig
+
+from eos.configuration.exceptions import EosConfigurationError
+
+AllowedParameterTypes = int | float | bool | str | list | dict
+
+
+def is_dynamic_parameter(parameter: AllowedParameterTypes) -> bool:
+ return isinstance(parameter, str) and parameter.lower() == "eos_dynamic"
+
+
+class ParameterType(Enum):
+ integer = "integer"
+ decimal = "decimal"
+ string = "string"
+ boolean = "boolean"
+ choice = "choice"
+ list = "list"
+ dictionary = "dictionary"
+
+ def python_type(self) -> type:
+ mapping = {
+ "integer": int,
+ "decimal": float,
+ "string": str,
+ "boolean": bool,
+ "choice": str,
+ "list": list,
+ "dictionary": dict,
+ }
+ return mapping[self.value]
+
+
+@dataclass(kw_only=True)
+class Parameter:
+ type: ParameterType
+ description: str
+ value: Any | None = None
+
+ def __post_init__(self):
+ self._validate_type()
+
+ def _validate_type(self) -> None:
+ try:
+ self.type = ParameterType(self.type)
+ except ValueError as e:
+ raise EosConfigurationError(f"Invalid task parameter type '{self.type}'") from e
+
+
+@dataclass(kw_only=True)
+class NumericParameter(Parameter):
+ unit: str
+ min: int | float | None = None
+ max: int | float | None = None
+
+ def __post_init__(self):
+ super().__post_init__()
+ self._validate_unit()
+ self._validate_min_max()
+ self._validate_value_range()
+
+ def _validate_unit(self) -> None:
+ if not self.unit:
+ raise EosConfigurationError("Task parameter type is numeric but no unit is specified.")
+
+ def _validate_min_max(self) -> None:
+ if self.min is not None and self.max is not None and self.min >= self.max:
+ raise EosConfigurationError("Task parameter 'min' is greater than or equal to 'max'.")
+
+ def _validate_value_range(self) -> None:
+ if self.value is None or is_dynamic_parameter(self.value):
+ return
+
+ if not isinstance(self.value, int | float):
+ raise EosConfigurationError("Task parameter value is not numerical.")
+ if self.min is not None and self.value < self.min:
+ raise EosConfigurationError("Task parameter value is less than 'min'.")
+ if self.max is not None and self.value > self.max:
+ raise EosConfigurationError("Task parameter value is greater than 'max'.")
+
+
+@dataclass(kw_only=True)
+class BooleanParameter(Parameter):
+ def __post_init__(self):
+ super().__post_init__()
+ self._validate_value()
+
+ def _validate_value(self) -> None:
+ if not isinstance(self.value, bool) and not is_dynamic_parameter(self.value):
+ raise EosConfigurationError(
+ f"Task parameter value '{self.value}' is not true/false but the declared type is 'boolean'."
+ )
+
+
+@dataclass(kw_only=True)
+class ChoiceParameter(Parameter):
+ choices: list[str]
+
+ def __post_init__(self):
+ super().__post_init__()
+ self._validate_choices()
+
+ def _validate_choices(self) -> None:
+ if not self.choices:
+ raise EosConfigurationError("Task parameter choices are not specified when the type is 'choice'.")
+
+ if not self.value or (self.value not in self.choices and not is_dynamic_parameter(self.value)):
+ raise EosConfigurationError(
+ f"Task parameter value '{self.value}' is not one of the choices {self.choices}."
+ )
+
+
+@dataclass(kw_only=True)
+class ListParameter(Parameter):
+ element_type: ParameterType
+ length: int | None = None
+ min: list[int | float] | None = None
+ max: list[int | float] | None = None
+
+ def __post_init__(self):
+ super().__post_init__()
+ self._validate_element_type()
+ self._validate_list_attributes()
+ self._validate_elements_within_bounds()
+
+ def _validate_element_type(self) -> None:
+ if isinstance(self.element_type, str):
+ try:
+ self.element_type = ParameterType[self.element_type]
+ except KeyError as e:
+ raise EosConfigurationError(f"Invalid list parameter element type '{self.element_type}'") from e
+ if self.element_type == ParameterType.list:
+ raise EosConfigurationError("List parameter element type cannot be 'list'. Nested lists are not supported.")
+
+ def _validate_list_attributes(self) -> None:
+ for attr_name in ["value", "min", "max"]:
+ attr_value = getattr(self, attr_name)
+ if attr_value is None:
+ continue
+
+ if not isinstance(attr_value, list) and not isinstance(attr_value, ListConfig):
+ raise EosConfigurationError(
+ f"List parameter '{attr_name}' must be a list for 'list' type parameters."
+ )
+ if not all(isinstance(item, self.element_type.python_type()) for item in attr_value):
+ raise EosConfigurationError(
+ f"All elements of list parameter '{attr_name}' must be of the same type as specified "
+ f"by 'element_type'."
+ )
+ if self.length is not None and len(attr_value) != self.length:
+ raise EosConfigurationError(f"List parameter '{attr_name}' length must be {self.length}.")
+
+ def _validate_elements_within_bounds(self) -> None:
+ if self.value is None or is_dynamic_parameter(self.value) or (self.min is None and self.max is None):
+ return
+
+ if self.length is None and (self.min is not None or self.max is not None):
+ raise EosConfigurationError(
+ "List parameter 'min' and 'max' can only be specified when 'length' is specified."
+ )
+
+ _min = self.min or [float("-inf")] * self.length
+ _max = self.max or [float("inf")] * self.length
+ for i, val in enumerate(self.value):
+ if not _min[i] <= val <= _max[i]:
+ raise EosConfigurationError(
+ f"Element {i} of the list with value {val} is not within the the bounds [{_min[i]}, {_max[i]}]."
+ )
+
+
+@dataclass(kw_only=True)
+class DictionaryParameter(Parameter):
+ pass
+
+
+class ParameterFactory:
+ _TYPE_MAPPING: ClassVar = {
+ ParameterType.integer: NumericParameter,
+ ParameterType.decimal: NumericParameter,
+ ParameterType.string: Parameter,
+ ParameterType.boolean: BooleanParameter,
+ ParameterType.choice: ChoiceParameter,
+ ParameterType.list: ListParameter,
+ ParameterType.dictionary: DictionaryParameter,
+ }
+
+ @staticmethod
+ def create_parameter(parameter_type: ParameterType | str, **kwargs) -> Parameter:
+ if isinstance(parameter_type, str):
+ parameter_type = ParameterType(parameter_type)
+
+ parameter_class = ParameterFactory._TYPE_MAPPING.get(parameter_type)
+ if not parameter_class:
+ raise EosConfigurationError(f"Unsupported parameter type: {parameter_type}")
+
+ if "type" not in kwargs:
+ kwargs["type"] = parameter_type
+
+ return parameter_class(**kwargs)
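+
+
+# Illustrative sketch: creating and validating a numeric parameter through
+# ParameterFactory (values are hypothetical):
+#
+# temperature = ParameterFactory.create_parameter(
+#     "decimal",
+#     description="Reaction temperature",
+#     unit="celsius",
+#     min=20,
+#     max=100,
+#     value=25.0,
+# )
+# assert temperature.type == ParameterType.decimal
+# # Values outside [min, max] raise EosConfigurationError in __post_init__.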
diff --git a/eos/configuration/entities/task.py b/eos/configuration/entities/task.py
new file mode 100644
index 0000000..eca8b1c
--- /dev/null
+++ b/eos/configuration/entities/task.py
@@ -0,0 +1,21 @@
+from dataclasses import dataclass, field
+from typing import Any
+
+
+@dataclass
+class TaskDeviceConfig:
+ lab_id: str
+ id: str
+
+
+@dataclass
+class TaskConfig:
+ id: str
+ type: str
+ devices: list[TaskDeviceConfig] = field(default_factory=list)
+ containers: dict[str, str] = field(default_factory=dict)
+ parameters: dict[str, Any] = field(default_factory=dict)
+ dependencies: list[str] = field(default_factory=list)
+
+ max_duration_seconds: int | None = None
+ description: str | None = None
diff --git a/eos/configuration/entities/task_specification.py b/eos/configuration/entities/task_specification.py
new file mode 100644
index 0000000..7f5e232
--- /dev/null
+++ b/eos/configuration/entities/task_specification.py
@@ -0,0 +1,110 @@
+import re
+from dataclasses import dataclass, field
+from typing import Any
+
+from eos.configuration.entities.parameters import (
+ ParameterFactory,
+ ParameterType,
+)
+from eos.configuration.exceptions import EosConfigurationError
+
+
+@dataclass
+class TaskSpecificationContainer:
+ type: str
+
+ def __post_init__(self):
+ self._validate_type()
+
+ def _validate_type(self) -> None:
+ if not self.type.strip():
+ raise EosConfigurationError("Container 'type' field must be specified.")
+
+
+@dataclass
+class TaskSpecificationOutputParameter:
+ type: ParameterType
+ description: str
+ unit: str | None = None
+
+ def __post_init__(self):
+ self._validate_type()
+ self._validate_unit_specified_if_type_numeric()
+ self._validate_unit_not_specified_if_type_not_numeric()
+
+ def _validate_type(self) -> None:
+ try:
+ self.type = ParameterType(self.type)
+ except ValueError as e:
+ raise EosConfigurationError(f"Invalid task output parameter type '{self.type}'") from e
+
+ def _validate_unit_specified_if_type_numeric(self) -> None:
+ if self.type not in [ParameterType.integer, ParameterType.decimal]:
+ return
+ if self.unit is None or self.unit.strip() == "":
+ raise EosConfigurationError("Task output parameter type is numeric but no unit is specified.")
+
+ def _validate_unit_not_specified_if_type_not_numeric(self) -> None:
+ if self.type in [ParameterType.integer, ParameterType.decimal]:
+ return
+ if self.unit is not None:
+ raise EosConfigurationError("Task output parameter type is not numeric but a unit is specified.")
+
+
+@dataclass
+class TaskSpecification:
+ type: str
+ description: str
+ device_types: list[str] | None = None
+
+ input_containers: dict[str, TaskSpecificationContainer] = field(default_factory=dict)
+ input_parameters: dict[str, Any] = field(default_factory=dict)
+
+ output_parameters: dict[str, TaskSpecificationOutputParameter] = field(default_factory=dict)
+ output_containers: dict[str, TaskSpecificationContainer] = field(default_factory=dict)
+
+ def __post_init__(self):
+ if not self.output_containers:
+ self.output_containers = self.input_containers.copy()
+
+ self._validate_parameters()
+ self._validate_parameter_names()
+ self._validate_container_names()
+
+ def _validate_parameters(self) -> None:
+ for parameter in self.input_parameters.values():
+ _ = ParameterFactory.create_parameter(ParameterType(parameter["type"]), **parameter)
+
+ def _validate_parameter_names(self) -> None:
+ valid_name_pattern = re.compile(r"^[a-zA-Z0-9_.]*$")
+
+ for name in self.input_parameters:
+ if not valid_name_pattern.match(name):
+ raise EosConfigurationError(
+ f"Invalid task parameter name '{name}'. "
+ f"Only characters, numbers, dots, and underscores are allowed."
+ )
+
+ for name in self.output_parameters:
+ if not valid_name_pattern.match(name):
+ raise EosConfigurationError(
+ f"Invalid task parameter name '{name}'. "
+ f"Only characters, numbers, dots, and underscores are allowed."
+ )
+
+ def _validate_container_names(self) -> None:
+ valid_name_pattern = re.compile(r"^[a-zA-Z0-9_.]*$")
+
+ for name in self.input_containers:
+ if not valid_name_pattern.match(name):
+ raise EosConfigurationError(
+ f"Invalid task input container name '{name}'. "
+ f"Only characters, numbers, dots, and underscores are allowed."
+ )
+
+ for name in self.output_containers:
+ if not valid_name_pattern.match(name):
+ raise EosConfigurationError(
+ f"Invalid task output container name '{name}'. "
+ f"Only characters, numbers, dots, and underscores are allowed."
+ )
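+
+
+# Illustrative sketch: a minimal task specification built in code. In practice
+# these are loaded from task.yml files; the names here are hypothetical:
+#
+# spec = TaskSpecification(
+#     type="magnetic_mixing",
+#     description="Mix the contents of a container",
+#     input_containers={"beaker": TaskSpecificationContainer(type="beaker_500ml")},
+#     output_parameters={
+#         "mixing_time": TaskSpecificationOutputParameter(
+#             type=ParameterType.integer, description="Elapsed mixing time", unit="seconds"
+#         )
+#     },
+# )
+# # output_containers defaults to a copy of input_containers (see __post_init__).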
diff --git a/eos/configuration/exceptions.py b/eos/configuration/exceptions.py
new file mode 100644
index 0000000..a15fed2
--- /dev/null
+++ b/eos/configuration/exceptions.py
@@ -0,0 +1,38 @@
+class EosConfigurationError(Exception):
+ pass
+
+
+class EosMissingConfigurationError(Exception):
+ pass
+
+
+class EosExperimentConfigurationError(Exception):
+ pass
+
+
+class EosLabConfigurationError(Exception):
+ pass
+
+
+class EosContainerConfigurationError(Exception):
+ pass
+
+
+class EosTaskValidationError(Exception):
+ pass
+
+
+class EosDynamicParameterConfigurationError(Exception):
+ pass
+
+
+class EosTaskGraphError(Exception):
+ pass
+
+
+class EosTaskHandlerClassNotFoundError(Exception):
+ pass
+
+
+class EosCampaignOptimizerNotFoundError(Exception):
+ pass
diff --git a/eos/configuration/experiment_graph/__init__.py b/eos/configuration/experiment_graph/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/eos/configuration/experiment_graph/experiment_graph.py b/eos/configuration/experiment_graph/experiment_graph.py
new file mode 100644
index 0000000..2801796
--- /dev/null
+++ b/eos/configuration/experiment_graph/experiment_graph.py
@@ -0,0 +1,88 @@
+from dataclasses import dataclass
+from typing import Any
+
+import networkx as nx
+
+from eos.configuration.entities.experiment import ExperimentConfig
+from eos.configuration.entities.task import TaskConfig
+from eos.configuration.entities.task_specification import TaskSpecification
+from eos.configuration.exceptions import EosTaskGraphError
+from eos.configuration.experiment_graph.experiment_graph_builder import ExperimentGraphBuilder
+from eos.configuration.spec_registries.task_specification_registry import TaskSpecificationRegistry
+
+
+@dataclass
+class TaskNodeIO:
+ containers: list[str]
+ parameters: list[str]
+
+
+class ExperimentGraph:
+ def __init__(self, experiment_config: ExperimentConfig):
+ self._experiment_config = experiment_config
+ self._task_specs = TaskSpecificationRegistry()
+
+ self._graph = ExperimentGraphBuilder(experiment_config).build_graph()
+
+ self._task_subgraph = self._create_task_subgraph()
+
+ # Check for cycles before sorting; a topological sort of a cyclic graph would fail opaquely.
+ if not nx.is_directed_acyclic_graph(self._task_subgraph):
+ raise EosTaskGraphError(f"Task graph of experiment '{experiment_config.type}' contains cycles.")
+
+ self._topologically_sorted_tasks = self._stable_topological_sort(self._task_subgraph)
+
+ def _create_task_subgraph(self) -> nx.DiGraph:
+ return nx.subgraph_view(self._graph, filter_node=lambda n: self._graph.nodes[n]["node_type"] == "task")
+
+ def get_graph(self) -> nx.DiGraph:
+ return self._graph
+
+ def get_task_graph(self) -> nx.DiGraph:
+ return nx.DiGraph(self._task_subgraph)
+
+ def get_tasks(self) -> list[str]:
+ return list(self._task_subgraph.nodes)
+
+ def get_topologically_sorted_tasks(self) -> list[str]:
+ return self._topologically_sorted_tasks
+
+ def get_task_node(self, task_id: str) -> dict[str, Any]:
+ return self._graph.nodes[task_id]
+
+ def get_task_config(self, task_id: str) -> TaskConfig:
+ return TaskConfig(**self.get_task_node(task_id)["task_config"])
+
+ def get_task_spec(self, task_id: str) -> TaskSpecification:
+ return self._task_specs.get_spec_by_type(self.get_task_node(task_id)["task_config"].type)
+
+ def get_task_dependencies(self, task_id: str) -> list[str]:
+ return [pred for pred in self._graph.predecessors(task_id) if self._graph.nodes[pred]["node_type"] == "task"]
+
+ def _get_node_by_type(self, task_id: str, node_type: str, direction: str) -> list[str]:
+ if direction == "in":
+ nodes = self._graph.predecessors(task_id)
+ elif direction == "out":
+ nodes = self._graph.successors(task_id)
+ else:
+ raise ValueError("direction must be 'in' or 'out'")
+
+ return [node for node in nodes if self._graph.nodes[node]["node_type"] == node_type]
+
+ def get_task_inputs(self, task_id: str) -> TaskNodeIO:
+ return TaskNodeIO(
+ containers=self._get_node_by_type(task_id, "container", "in"),
+ parameters=self._get_node_by_type(task_id, "parameter", "in"),
+ )
+
+ def get_task_outputs(self, task_id: str) -> TaskNodeIO:
+ return TaskNodeIO(
+ containers=self._get_node_by_type(task_id, "container", "out"),
+ parameters=self._get_node_by_type(task_id, "parameter", "out"),
+ )
+
+ def get_container_node(self, container_id: str) -> dict[str, Any]:
+ return self._graph.nodes[container_id]
+
+ @staticmethod
+ def _stable_topological_sort(graph: nx.DiGraph) -> list[str]:
+ # Sort lexicographically to break ties deterministically; copying into a concrete DiGraph
+ # also keeps task nodes that have no dependency edges.
+ return list(nx.lexicographical_topological_sort(nx.DiGraph(graph)))
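+
+
+# Illustrative usage sketch for a hypothetical experiment whose task "filter"
+# depends on task "mix":
+#
+# graph = ExperimentGraph(experiment_config)
+# graph.get_topologically_sorted_tasks() # ["mix", "filter"]
+# graph.get_task_dependencies("filter") # ["mix"]
+# graph.get_task_outputs("mix") # TaskNodeIO of downstream containers/parameters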
diff --git a/eos/configuration/experiment_graph/experiment_graph_builder.py b/eos/configuration/experiment_graph/experiment_graph_builder.py
new file mode 100644
index 0000000..ab7ea2b
--- /dev/null
+++ b/eos/configuration/experiment_graph/experiment_graph_builder.py
@@ -0,0 +1,108 @@
+import networkx as nx
+
+from eos.configuration.entities.experiment import ExperimentConfig
+from eos.configuration.spec_registries.task_specification_registry import (
+ TaskSpecificationRegistry,
+)
+from eos.configuration.validation import validation_utils
+
+
+class ExperimentGraphBuilder:
+ """
+ Builds an experiment graph from an experiment configuration and lab configurations.
+ """
+
+ def __init__(self, experiment_config: ExperimentConfig):
+ self._experiment = experiment_config
+ self._task_specs = TaskSpecificationRegistry()
+
+ def build_graph(self) -> nx.DiGraph:
+ graph = nx.DiGraph()
+
+ self._add_begin_and_end_nodes(graph)
+ self._add_task_nodes_and_edges(graph)
+ self._add_container_nodes(graph)
+ self._add_parameter_nodes(graph)
+ self._connect_orphan_task_nodes(graph)
+ self._remove_orphan_nodes(graph)
+
+ return graph
+
+ def _add_begin_and_end_nodes(self, graph: nx.DiGraph) -> None:
+ graph.add_node("Begin", node_type="begin")
+ graph.add_node("End", node_type="end")
+
+ first_task = self._experiment.tasks[0].id
+ last_task = self._experiment.tasks[-1].id
+ graph.add_edge("Begin", first_task)
+ graph.add_edge(last_task, "End")
+
+ def _add_task_nodes_and_edges(self, graph: nx.DiGraph) -> None:
+ for task in self._experiment.tasks:
+ graph.add_node(task.id, node_type="task", task_config=task)
+ for dep in task.dependencies:
+ graph.add_edge(dep, task.id)
+
+ @staticmethod
+ def _connect_orphan_task_nodes(graph: nx.DiGraph) -> None:
+ for node, node_data in list(graph.nodes(data=True)):
+ if node_data["node_type"] == "task" and node not in ["Begin", "End"]:
+ if graph.in_degree(node) == 0:
+ graph.add_edge("Begin", node)
+ if graph.out_degree(node) == 0:
+ graph.add_edge(node, "End")
+
+ def _add_container_nodes(self, graph: nx.DiGraph) -> None:
+ container_mapping = {}
+ used_containers = set()
+
+ for task in self._experiment.tasks:
+ for container_name, container_id in task.containers.items():
+ # Determine the container ID for this task
+ if container_id not in used_containers:
+ input_container_id = container_id
+ else:
+ input_container_id = f"{container_id}_{task.id}"
+
+ # If this container is used as output by a previous task, update the input_container_id
+ if container_id in container_mapping:
+ previous_output_container_id = container_mapping[container_id]
+ input_container_id = previous_output_container_id
+
+ # Add container node as input for the current task
+ if input_container_id not in graph:
+ graph.add_node(input_container_id, node_type="container", container={container_name: container_id})
+ graph.add_edge(input_container_id, task.id)
+
+ # Add container node as output for the current task
+ output_container_id = f"{container_id}_{task.id}"
+ graph.add_node(output_container_id, node_type="container", container={container_name: container_id})
+ graph.add_edge(task.id, output_container_id)
+
+ # Update the container mapping to link the output of this task to the next task's input
+ container_mapping[container_id] = output_container_id
+
+ used_containers.add(container_id)
+
+ def _add_parameter_nodes(self, graph: nx.DiGraph) -> None:
+ for task in self._experiment.tasks:
+ for param_name, param_value in task.parameters.items():
+ if validation_utils.is_parameter_reference(param_value):
+ parameter_reference = param_value
+ producer_task_id, parameter_name = parameter_reference.split(".")
+ graph.add_node(parameter_reference, node_type="parameter")
+ graph.add_edge(parameter_reference, task.id, mapped_parameter=param_name)
+ graph.add_edge(producer_task_id, parameter_reference)
+
+ # Add output parameters based on task specs
+ task_spec = self._task_specs.get_spec_by_config(task)
+ for param_name in task_spec.output_parameters:
+ ref_param_name = f"{task.id}.{param_name}"
+ graph.add_node(ref_param_name, node_type="parameter")
+ graph.add_edge(task.id, ref_param_name)
+
+ @staticmethod
+ def _remove_orphan_nodes(graph: nx.DiGraph) -> None:
+ orphan_nodes = [node for node in graph.nodes if graph.in_degree(node) == 0 and graph.out_degree(node) == 0]
+ for node in orphan_nodes:
+ graph.remove_node(node)
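+
+
+# Illustrative sketch of the container chaining performed by _add_container_nodes:
+# if tasks "mix" and then "filter" both reference container id "beaker_a", the
+# builder produces the chain
+# "beaker_a" -> mix -> "beaker_a_mix" -> filter -> "beaker_a_filter",
+# so each task consumes the container node produced by the previous task.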
diff --git a/eos/configuration/package.py b/eos/configuration/package.py
new file mode 100644
index 0000000..31e81f1
--- /dev/null
+++ b/eos/configuration/package.py
@@ -0,0 +1,18 @@
+from pathlib import Path
+
+from eos.configuration.constants import COMMON_DIR, EXPERIMENTS_DIR, LABS_DIR, DEVICES_DIR, TASKS_DIR
+
+
+class Package:
+ """
+ A collection of user-defined common files, experiments, labs, devices, and tasks.
+ """
+
+ def __init__(self, name: str, path: str):
+ self.name = name
+ self.path = Path(path)
+ self.common_dir = self.path / COMMON_DIR
+ self.experiments_dir = self.path / EXPERIMENTS_DIR
+ self.labs_dir = self.path / LABS_DIR
+ self.devices_dir = self.path / DEVICES_DIR
+ self.tasks_dir = self.path / TASKS_DIR
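+
+
+# Illustrative sketch (hypothetical paths): a package named "chemistry" under
+# the user directory "/eos/user" resolves its entity directories as follows:
+#
+# pkg = Package("chemistry", "/eos/user/chemistry")
+# pkg.labs_dir # Path("/eos/user/chemistry/labs")
+# pkg.tasks_dir # Path("/eos/user/chemistry/tasks")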
diff --git a/eos/configuration/package_manager.py b/eos/configuration/package_manager.py
new file mode 100644
index 0000000..02298e8
--- /dev/null
+++ b/eos/configuration/package_manager.py
@@ -0,0 +1,332 @@
+import os
+from dataclasses import dataclass
+from enum import Enum, auto
+from pathlib import Path
+from typing import TypeVar, Generic, Any
+
+import jinja2
+import yaml
+from omegaconf import OmegaConf, ValidationError
+
+from eos.configuration.constants import (
+ LABS_DIR,
+ EXPERIMENTS_DIR,
+ TASKS_DIR,
+ DEVICES_DIR,
+ LAB_CONFIG_FILE_NAME,
+ EXPERIMENT_CONFIG_FILE_NAME,
+ DEVICE_CONFIG_FILE_NAME,
+ TASK_CONFIG_FILE_NAME,
+)
+from eos.configuration.entities.device_specification import DeviceSpecification
+from eos.configuration.entities.experiment import ExperimentConfig
+from eos.configuration.entities.lab import LabConfig
+from eos.configuration.entities.task_specification import TaskSpecification
+from eos.configuration.exceptions import EosConfigurationError, EosMissingConfigurationError
+from eos.configuration.package import Package
+from eos.configuration.package_validator import PackageValidator
+from eos.logging.logger import log
+
+T = TypeVar("T")
+
+
+class EntityType(Enum):
+ LAB = auto()
+ EXPERIMENT = auto()
+ TASK = auto()
+ DEVICE = auto()
+
+
+@dataclass
+class EntityInfo:
+ dir_name: str
+ config_file_name: str
+ config_type: type
+
+
+@dataclass
+class EntityLocationInfo:
+ package_name: str
+ entity_path: str
+
+
+ENTITY_INFO: dict[EntityType, EntityInfo] = {
+ EntityType.LAB: EntityInfo(LABS_DIR, LAB_CONFIG_FILE_NAME, LabConfig),
+ EntityType.EXPERIMENT: EntityInfo(EXPERIMENTS_DIR, EXPERIMENT_CONFIG_FILE_NAME, ExperimentConfig),
+ EntityType.TASK: EntityInfo(TASKS_DIR, TASK_CONFIG_FILE_NAME, TaskSpecification),
+ EntityType.DEVICE: EntityInfo(DEVICES_DIR, DEVICE_CONFIG_FILE_NAME, DeviceSpecification),
+}
+ConfigType = LabConfig | ExperimentConfig | TaskSpecification | DeviceSpecification
+
+
+class EntityConfigReader(Generic[T]):
+ """
+ Reads and parses entity configurations from files.
+
+ The EntityConfigReader class provides static methods to read and parse configuration
+ files for various entity types (labs, experiments, tasks, and devices) in the EOS system.
+ It handles the loading, validation, and structuring of configuration data using OmegaConf.
+ """
+
+ @staticmethod
+ def read_entity_config(file_path: str, entity_type: EntityType) -> ConfigType:
+ entity_info = ENTITY_INFO[entity_type]
+ return EntityConfigReader._read_config(file_path, entity_info.config_type, entity_type.name)
+
+ @staticmethod
+ def read_all_entity_configs(base_dir: str, entity_type: EntityType) -> tuple[dict[str, ConfigType], dict[str, str]]:
+ entity_info = ENTITY_INFO[entity_type]
+ configs = {}
+ dirs_to_types = {}
+
+ for root, _, files in os.walk(base_dir):
+ if entity_info.config_file_name not in files:
+ continue
+
+ entity_subdir = Path(root).relative_to(base_dir)
+ config_file_path = Path(root) / entity_info.config_file_name
+
+ try:
+ structured_config = EntityConfigReader.read_entity_config(str(config_file_path), entity_type)
+ entity_type_name = structured_config.type
+ configs[entity_type_name] = structured_config
+ dirs_to_types[entity_subdir] = entity_type_name
+
+ log.debug(
+ f"Loaded {entity_type.name.lower()} specification from directory '{entity_subdir}' of type "
+ f"'{entity_type_name}'"
+ )
+ log.debug(f"{entity_type.name} configuration '{entity_type_name}': {structured_config}")
+ except EosConfigurationError as e:
+ log.error(f"Error loading {entity_type.name.lower()} configuration from '{config_file_path}': {e}")
+ raise
+
+ return configs, dirs_to_types
+
+ @staticmethod
+ def _read_config(file_path: str, config_type: type[ConfigType], config_name: str) -> ConfigType:
+ try:
+ config_data = EntityConfigReader._process_jinja_yaml(file_path)
+
+ structured_config = OmegaConf.merge(OmegaConf.structured(config_type), OmegaConf.create(config_data))
+ _ = OmegaConf.to_object(structured_config)
+
+ return structured_config
+ except OSError as e:
+ raise EosConfigurationError(f"Error reading configuration file '{file_path}': {e!s}") from e
+ except ValidationError as e:
+ raise EosConfigurationError(f"Configuration is invalid: {e!s}") from e
+ except jinja2.exceptions.TemplateError as e:
+ raise EosConfigurationError(f"Error in Jinja2 template processing for '{config_name}': {e!s}") from e
+ except Exception as e:
+ raise EosConfigurationError(f"Error processing {config_name} configuration: {e!s}") from e
+
+ @staticmethod
+ def _process_jinja_yaml(file_path: str) -> dict[str, Any]:
+ """
+ Process a YAML file with Jinja2 templating, without passing any variables.
+
+ This method:
+ 1. Reads the YAML file
+ 2. Renders the Jinja2 template without any variables
+ 3. Parses the rendered content back into a Python dictionary
+ """
+ try:
+ with Path(file_path).open() as f:
+ raw_content = f.read()
+ except OSError as e:
+ raise EosConfigurationError(f"Error reading file '{file_path}': {e}") from e
+
+ try:
+ env = jinja2.Environment(
+ loader=jinja2.FileSystemLoader(Path(file_path).parents[3]), # user directory
+ undefined=jinja2.StrictUndefined,
+ autoescape=True,
+ )
+
+ template = env.from_string(raw_content)
+ rendered_content = template.render()
+
+ return yaml.safe_load(rendered_content)
+ except yaml.YAMLError as e:
+ raise EosConfigurationError(f"Error parsing YAML in {file_path}: {e}") from e
+ except jinja2.exceptions.TemplateError as e:
+ raise EosConfigurationError(f"Error in Jinja2 template processing: {e}") from e
+
+
+class PackageDiscoverer:
+ """
+ Discovers packages in the user directory.
+ """
+
+ def __init__(self, user_dir: str):
+ self.user_dir = Path(user_dir)
+
+ def discover_packages(self) -> dict[str, Package]:
+ packages = {}
+ if not self.user_dir.is_dir():
+ raise EosMissingConfigurationError(f"User directory '{self.user_dir}' does not exist")
+
+ for item in os.listdir(self.user_dir):
+ package_path = self.user_dir / item
+
+ if package_path.is_dir():
+ packages[item] = Package(item, package_path)
+
+ return packages
+
+
+class PackageManager:
+ """
+ Manages packages and entity configurations within the user directory.
+
+ The PackageManager class provides facilities to discover, read, add, and remove packages,
+ as well as read entity configurations (labs, experiments, tasks, and devices) from these packages.
+ It also maintains efficient lookup indices for quick access to entities across all packages.
+ """
+
+ def __init__(self, user_dir: str):
+ self.user_dir = user_dir
+
+ self.packages: dict[str, Package] = {}
+ self.entity_indices: dict[EntityType, dict[str, EntityLocationInfo]] = {
+ entity_type: {} for entity_type in EntityType
+ }
+
+ self._discover_packages()
+ log.info(f"Found packages: {', '.join(self.packages.keys())}")
+
+ log.debug("Package manager initialized")
+
+ def read_lab_config(self, lab_name: str) -> LabConfig:
+ entity_location = self._get_entity_location(lab_name, EntityType.LAB)
+ config_file_path = self._get_config_file_path(entity_location, EntityType.LAB)
+ return EntityConfigReader.read_entity_config(config_file_path, EntityType.LAB)
+
+ def read_experiment_config(self, experiment_name: str) -> ExperimentConfig:
+ entity_location = self._get_entity_location(experiment_name, EntityType.EXPERIMENT)
+ config_file_path = self._get_config_file_path(entity_location, EntityType.EXPERIMENT)
+ return EntityConfigReader.read_entity_config(config_file_path, EntityType.EXPERIMENT)
+
+ def read_task_configs(self) -> tuple[dict[str, TaskSpecification], dict[str, str]]:
+ return self._read_all_entity_configs(EntityType.TASK)
+
+ def read_device_configs(self) -> tuple[dict[str, DeviceSpecification], dict[str, str]]:
+ return self._read_all_entity_configs(EntityType.DEVICE)
+
+ def get_package(self, name: str) -> Package | None:
+ return self.packages.get(name)
+
+ def get_all_packages(self) -> list[Package]:
+ return list(self.packages.values())
+
+ def _find_package_for_entity(self, entity_name: str, entity_type: EntityType) -> Package | None:
+ entity_location = self.entity_indices[entity_type].get(entity_name)
+ if entity_location:
+ return self.packages.get(entity_location.package_name)
+ return None
+
+ def find_package_for_lab(self, lab_name: str) -> Package | None:
+ return self._find_package_for_entity(lab_name, EntityType.LAB)
+
+ def find_package_for_experiment(self, experiment_name: str) -> Package | None:
+ return self._find_package_for_entity(experiment_name, EntityType.EXPERIMENT)
+
+ def find_package_for_task(self, task_name: str) -> Package | None:
+ return self._find_package_for_entity(task_name, EntityType.TASK)
+
+ def find_package_for_device(self, device_name: str) -> Package | None:
+ return self._find_package_for_entity(device_name, EntityType.DEVICE)
+
+ def add_package(self, package_name: str) -> None:
+ package_path = Path(self.user_dir) / package_name
+ if not package_path.is_dir():
+ raise EosMissingConfigurationError(f"Package directory '{package_path}' does not exist")
+
+ new_package = Package(package_name, package_path)
+ PackageValidator(self.user_dir, {package_name: new_package}).validate()
+
+ self.packages[package_name] = new_package
+ self._update_entity_indices(new_package)
+
+ log.info(f"Added package '{package_name}'")
+
+ def remove_package(self, package_name: str) -> None:
+ if package_name not in self.packages:
+ raise EosMissingConfigurationError(f"Package '{package_name}' not found")
+
+ package = self.packages[package_name]
+ del self.packages[package_name]
+ self._remove_package_from_indices(package)
+
+ log.info(f"Removed package '{package_name}'")
+
+ def _discover_packages(self) -> None:
+ self.packages = PackageDiscoverer(self.user_dir).discover_packages()
+ PackageValidator(self.user_dir, self.packages).validate()
+ self._build_entity_indices()
+
+ def _build_entity_indices(self) -> None:
+ for entity_type in EntityType:
+ self.entity_indices[entity_type] = {}
+ for package_name, package in self.packages.items():
+ entity_dir = Path(getattr(package, f"{ENTITY_INFO[entity_type].dir_name}_dir"))
+ if entity_dir.is_dir():
+ self._index_entities(entity_type, package_name, str(entity_dir))
+
+ def _index_entities(self, entity_type: EntityType, package_name: str, entity_dir: str) -> None:
+ for root, _, files in os.walk(entity_dir):
+ if ENTITY_INFO[entity_type].config_file_name in files:
+ entity_path = Path(root).relative_to(Path(entity_dir))
+ entity_name = Path(entity_path).name
+ self.entity_indices[entity_type][entity_name] = EntityLocationInfo(package_name, str(entity_path))
+
+ def _update_entity_indices(self, package: Package) -> None:
+ for entity_type in EntityType:
+ entity_dir = Path(getattr(package, f"{ENTITY_INFO[entity_type].dir_name}_dir"))
+ if entity_dir.is_dir():
+ self._index_entities(entity_type, package.name, str(entity_dir))
+
+ def _remove_package_from_indices(self, package: Package) -> None:
+ for entity_type in EntityType:
+ self.entity_indices[entity_type] = {
+ entity_name: location
+ for entity_name, location in self.entity_indices[entity_type].items()
+ if location.package_name != package.name
+ }
+
+ def _get_entity_location(self, entity_name: str, entity_type: EntityType) -> EntityLocationInfo:
+ entity_location = self.entity_indices[entity_type].get(entity_name)
+ if not entity_location:
+ raise EosMissingConfigurationError(f"{entity_type.name} '{entity_name}' not found")
+ return entity_location
+
+ def _get_config_file_path(self, entity_location: EntityLocationInfo, entity_type: EntityType) -> str:
+ entity_info = ENTITY_INFO[entity_type]
+ package = self.packages[entity_location.package_name]
+ config_file_path = (
+ Path(getattr(package, f"{entity_info.dir_name}_dir"))
+ / entity_location.entity_path
+ / entity_info.config_file_name
+ )
+
+ if not config_file_path.is_file():
+ raise EosMissingConfigurationError(
+ f"{entity_type.name} file '{entity_info.config_file_name}' does not exist for "
+ f"{entity_type.name.lower()} '{entity_location.entity_path}'"
+ )
+
+ return config_file_path
+
+ def _read_all_entity_configs(self, entity_type: EntityType) -> tuple[dict[str, T], dict[str, str]]:
+ all_configs = {}
+ all_dirs_to_types = {}
+ for package in self.packages.values():
+ entity_dir = Path(getattr(package, f"{ENTITY_INFO[entity_type].dir_name}_dir"))
+ if not entity_dir.is_dir():
+ continue
+ configs, dirs_to_types = EntityConfigReader.read_all_entity_configs(entity_dir, entity_type)
+ all_configs.update(configs)
+ all_dirs_to_types.update({Path(package.name) / k: v for k, v in dirs_to_types.items()})
+ return all_configs, all_dirs_to_types
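+
+
+# Illustrative usage sketch (hypothetical package and entity names):
+#
+# pm = PackageManager("/eos/user")
+# lab_config = pm.read_lab_config("small_lab")
+# task_specs, task_dirs_to_types = pm.read_task_configs()
+# pm.add_package("new_package") # validates and indexes the new package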
diff --git a/eos/configuration/package_validator.py b/eos/configuration/package_validator.py
new file mode 100644
index 0000000..cf3137b
--- /dev/null
+++ b/eos/configuration/package_validator.py
@@ -0,0 +1,173 @@
+import os
+from pathlib import Path
+
+from eos.configuration.constants import (
+ COMMON_DIR,
+ EXPERIMENTS_DIR,
+ LABS_DIR,
+ DEVICES_DIR,
+ TASKS_DIR,
+ LAB_CONFIG_FILE_NAME,
+ EXPERIMENT_CONFIG_FILE_NAME,
+ TASK_CONFIG_FILE_NAME,
+ TASK_IMPLEMENTATION_FILE_NAME,
+ DEVICE_CONFIG_FILE_NAME,
+ DEVICE_IMPLEMENTATION_FILE_NAME,
+)
+from eos.configuration.exceptions import EosMissingConfigurationError, EosConfigurationError
+from eos.configuration.package import Package
+from eos.logging.logger import log
+
+
+class PackageValidator:
+ """
+ Responsible for validating user-defined packages.
+ """
+
+ def __init__(self, user_dir: str, packages: dict[str, Package]):
+ self.user_dir = user_dir
+ self.packages = packages
+
+ def validate(self) -> None:
+ if not self.packages:
+ raise EosMissingConfigurationError(f"No valid packages found in the user directory '{self.user_dir}'")
+
+ for package in self.packages.values():
+ self._validate_package_structure(package)
+
+ def _validate_package_structure(self, package: Package) -> None:
+ """
+ Validate the structure of a single package.
+ """
+ if not any(
+ [
+ package.common_dir.is_dir(),
+ package.experiments_dir.is_dir(),
+ package.labs_dir.is_dir(),
+ package.devices_dir.is_dir(),
+ package.tasks_dir.is_dir(),
+ ]
+ ):
+ raise EosMissingConfigurationError(
+ f"Package '{package.name}' does not contain any of the directories: "
+ f"{COMMON_DIR}, {EXPERIMENTS_DIR}, {LABS_DIR}, {DEVICES_DIR}, {TASKS_DIR}"
+ )
+
+ if package.labs_dir.is_dir():
+ self._validate_labs_dir(package)
+
+ if package.experiments_dir.is_dir():
+ self._validate_experiments_dir(package)
+
+ if package.devices_dir.is_dir():
+ self._validate_devices_dir(package)
+
+ if package.tasks_dir.is_dir():
+ self._validate_tasks_dir(package)
+
+ @staticmethod
+ def _validate_labs_dir(package: Package) -> None:
+ """
+ Validate the structure of the labs directory.
+ """
+ for lab in os.listdir(package.labs_dir):
+ lab_dir = package.labs_dir / lab
+ if not lab_dir.is_dir():
+ raise EosConfigurationError(
+ f"Non-directory file '{lab}' found in '{package.labs_dir}'. Only lab directories are allowed."
+ )
+
+ lab_file_path = lab_dir / LAB_CONFIG_FILE_NAME
+ if not lab_file_path.is_file():
+ raise EosMissingConfigurationError(f"Lab file '{LAB_CONFIG_FILE_NAME}' does not exist for lab '{lab}'")
+
+ log.debug(f"Detected lab '{lab}' in package '{package.name}'")
+
+ @staticmethod
+ def _validate_experiments_dir(package: Package) -> None:
+ """
+ Validate the structure of the experiments directory.
+ """
+ for file in os.listdir(package.experiments_dir):
+ file_path = package.experiments_dir / file
+ if not file_path.is_dir():
+ raise EosConfigurationError(
+ f"Non-directory file found in '{package.experiments_dir}'. Only experiment directories "
+ f"are allowed."
+ )
+
+ experiment_config_file = file_path / EXPERIMENT_CONFIG_FILE_NAME
+ if not experiment_config_file.is_file():
+ raise EosMissingConfigurationError(
+ f"Experiment configuration file '{EXPERIMENT_CONFIG_FILE_NAME}' does not exist for "
+ f"experiment '{file}'"
+ )
+
+ log.debug(f"Detected experiment '{file}' in package '{package.name}'")
+
+ @staticmethod
+ def _validate_tasks_dir(package: Package) -> None:
+ """
+ Validate the structure of the tasks directory.
+ Ensure each subdirectory represents a task and contains the necessary files.
+ """
+ task_types = []
+ for current_dir, _, files in os.walk(package.tasks_dir):
+ if TASK_CONFIG_FILE_NAME not in files:
+ continue
+
+ task_dir = Path(current_dir)
+ task_name = task_dir.relative_to(package.tasks_dir)
+
+ config_file = task_dir / TASK_CONFIG_FILE_NAME
+ implementation_file = task_dir / TASK_IMPLEMENTATION_FILE_NAME
+
+ if not config_file.is_file():
+ raise EosMissingConfigurationError(
+ f"Task configuration file '{TASK_CONFIG_FILE_NAME}' not found for task '{task_name}' "
+ f"in package '{package.name}'."
+ )
+
+ if not implementation_file.is_file():
+ raise EosMissingConfigurationError(
+ f"Task implementation file '{TASK_IMPLEMENTATION_FILE_NAME}' not found for task "
+ f"'{task_name}' in package '{package.name}'."
+ )
+
+ task_types.append(str(task_name))
+
+ log.debug(f"Detected tasks '{task_types}' in package '{package.name}'")
+
+ @staticmethod
+ def _validate_devices_dir(package: Package) -> None:
+ """
+ Validate the structure of the devices directory.
+ Ensure each subdirectory represents a device and contains the necessary files.
+ """
+ device_types = []
+ for current_dir, _, files in os.walk(package.devices_dir):
+ if DEVICE_CONFIG_FILE_NAME not in files:
+ continue
+
+ device_dir = Path(current_dir)
+ device_name = device_dir.relative_to(package.devices_dir)
+
+ config_file = device_dir / DEVICE_CONFIG_FILE_NAME
+ implementation_file = device_dir / DEVICE_IMPLEMENTATION_FILE_NAME
+
+ if not config_file.is_file():
+ raise EosMissingConfigurationError(
+ f"Device configuration file '{DEVICE_CONFIG_FILE_NAME}' not found for device "
+ f"'{device_name}' in package '{package.name}'."
+ )
+
+ if not implementation_file.is_file():
+ raise EosMissingConfigurationError(
+ f"Device implementation file '{DEVICE_IMPLEMENTATION_FILE_NAME}' not found for device "
+ f"'{device_name}' in package '{package.name}'."
+ )
+
+ device_types.append(str(device_name))
+
+ log.debug(f"Detected devices '{device_types}' in package '{package.name}'")
diff --git a/eos/configuration/plugin_registries/__init__.py b/eos/configuration/plugin_registries/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/eos/configuration/plugin_registries/campaign_optimizer_plugin_registry.py b/eos/configuration/plugin_registries/campaign_optimizer_plugin_registry.py
new file mode 100644
index 0000000..291168f
--- /dev/null
+++ b/eos/configuration/plugin_registries/campaign_optimizer_plugin_registry.py
@@ -0,0 +1,116 @@
+import importlib.util
+from collections.abc import Callable
+from pathlib import Path
+from typing import Any
+
+from eos.configuration.constants import (
+ CAMPAIGN_OPTIMIZER_FILE_NAME,
+ CAMPAIGN_OPTIMIZER_CREATION_FUNCTION_NAME,
+)
+from eos.configuration.exceptions import EosCampaignOptimizerNotFoundError
+from eos.configuration.package_manager import PackageManager
+from eos.configuration.plugin_registries.plugin_registry import PluginRegistry, PluginRegistryConfig
+from eos.logging.logger import log
+from eos.optimization.abstract_sequential_optimizer import AbstractSequentialOptimizer
+
+
+class CampaignOptimizerPluginRegistry(
+ PluginRegistry[Callable[[], tuple[dict[str, Any], type[AbstractSequentialOptimizer]]], Any]
+):
+ """
+ Responsible for dynamically loading campaign optimizers from all packages
+ and providing references to them for later use.
+ """
+
+ def __init__(self, package_manager: PackageManager):
+ config = PluginRegistryConfig(
+ spec_registry=None, # Campaign optimizers don't use a specification registry
+ base_class=None, # Campaign optimizers don't have a base class
+ config_file_name=None, # Campaign optimizers don't have a separate config file
+ implementation_file_name=CAMPAIGN_OPTIMIZER_FILE_NAME,
+ class_suffix="", # Campaign optimizers don't use a class suffix
+ error_class=EosCampaignOptimizerNotFoundError,
+ directory_name="experiments_dir",
+ )
+ super().__init__(package_manager, config)
+
+ def get_campaign_optimizer_creation_parameters(
+ self, experiment_type: str
+ ) -> tuple[dict[str, Any], type[AbstractSequentialOptimizer]] | None:
+ """
+ Get the constructor arguments and the optimizer type of the campaign optimizer for the given
+ experiment, so that the optimizer can be constructed later.
+
+ :param experiment_type: The type of the experiment.
+ :return: A tuple containing the constructor arguments and the optimizer type, or None if not found.
+ """
+ try:
+ optimizer_function = self.get_plugin_class_type(experiment_type)
+ except EosCampaignOptimizerNotFoundError:
+ return None
+ return optimizer_function()
+
+ def _load_single_plugin(self, package_name: str, dir_path: str, implementation_file: str) -> None:
+ module_name = Path(dir_path).name
+ spec = importlib.util.spec_from_file_location(module_name, implementation_file)
+ module = importlib.util.module_from_spec(spec)
+ spec.loader.exec_module(module)
+
+ if CAMPAIGN_OPTIMIZER_CREATION_FUNCTION_NAME in module.__dict__:
+ experiment_type = module_name
+ self._plugin_types[experiment_type] = module.__dict__[CAMPAIGN_OPTIMIZER_CREATION_FUNCTION_NAME]
+ self._plugin_modules[experiment_type] = implementation_file
+ log.info(f"Loaded campaign optimizer for experiment '{experiment_type}' from package '{package_name}'.")
+ else:
+ log.warning(
+ f"Optimizer configuration function '{CAMPAIGN_OPTIMIZER_CREATION_FUNCTION_NAME}' not found in the "
+ f"campaign optimizer file '{self._config.implementation_file_name}' of experiment "
+ f"'{Path(dir_path).name}' in package '{package_name}'."
+ )
+
+ def load_campaign_optimizer(self, experiment_type: str) -> None:
+ """
+ Load the optimizer configuration function for the given experiment from the appropriate package.
+ If the optimizer doesn't exist, log a warning and return without raising an error.
+ """
+ experiment_package = self._package_manager.find_package_for_experiment(experiment_type)
+ if not experiment_package:
+ log.warning(f"No package found for experiment '{experiment_type}'.")
+ return
+
+ optimizer_file = (
+ Path(experiment_package.experiments_dir) / experiment_type / self._config.implementation_file_name
+ )
+
+ if not Path(optimizer_file).exists():
+ log.warning(
+ f"No campaign optimizer found for experiment '{experiment_type}' in package "
+ f"'{experiment_package.name}'."
+ )
+ return
+
+ self._load_single_plugin(experiment_package.name, experiment_type, optimizer_file)
+
+ def unload_campaign_optimizer(self, experiment_type: str) -> None:
+ """
+ Unload the optimizer configuration function for the given experiment.
+ """
+ if experiment_type in self._plugin_types:
+ del self._plugin_types[experiment_type]
+ del self._plugin_modules[experiment_type]
+ log.info(f"Unloaded campaign optimizer for experiment '{experiment_type}'.")
+
+ def reload_plugin(self, experiment_type: str) -> None:
+ """
+ Reload a specific campaign optimizer by its experiment type.
+ """
+ self.unload_campaign_optimizer(experiment_type)
+ self.load_campaign_optimizer(experiment_type)
+ log.info(f"Reloaded campaign optimizer for experiment '{experiment_type}'.")
+
+ def reload_all_plugins(self) -> None:
+ """
+ Reload all campaign optimizers.
+ """
+ experiment_types = list(self._plugin_types.keys())
+ for experiment_type in experiment_types:
+ self.reload_plugin(experiment_type)
+ log.info("Reloaded all campaign optimizers.")
diff --git a/eos/configuration/plugin_registries/device_plugin_registry.py b/eos/configuration/plugin_registries/device_plugin_registry.py
new file mode 100644
index 0000000..87f9dd3
--- /dev/null
+++ b/eos/configuration/plugin_registries/device_plugin_registry.py
@@ -0,0 +1,23 @@
+from eos.configuration.constants import DEVICE_CONFIG_FILE_NAME, DEVICE_IMPLEMENTATION_FILE_NAME
+from eos.configuration.package_manager import PackageManager
+from eos.configuration.plugin_registries.plugin_registry import PluginRegistry, PluginRegistryConfig
+from eos.configuration.spec_registries.device_specification_registry import DeviceSpecificationRegistry
+from eos.devices.base_device import BaseDevice
+from eos.devices.exceptions import EosDeviceClassNotFoundError
+
+
+class DevicePluginRegistry(PluginRegistry[BaseDevice, DeviceSpecificationRegistry]):
+ def __init__(self, package_manager: PackageManager):
+ config = PluginRegistryConfig(
+ spec_registry=DeviceSpecificationRegistry(),
+ base_class=BaseDevice,
+ config_file_name=DEVICE_CONFIG_FILE_NAME,
+ implementation_file_name=DEVICE_IMPLEMENTATION_FILE_NAME,
+ class_suffix="Device",
+ error_class=EosDeviceClassNotFoundError,
+ directory_name="devices_dir",
+ )
+ super().__init__(package_manager, config)
+
+ def get_device_class_type(self, device_type: str) -> type[BaseDevice]:
+ return self.get_plugin_class_type(device_type)
diff --git a/eos/configuration/plugin_registries/plugin_registry.py b/eos/configuration/plugin_registries/plugin_registry.py
new file mode 100644
index 0000000..8859083
--- /dev/null
+++ b/eos/configuration/plugin_registries/plugin_registry.py
@@ -0,0 +1,118 @@
+import importlib.util
+import inspect
+import os
+from dataclasses import dataclass
+from pathlib import Path
+from typing import Generic, TypeVar
+
+from eos.configuration.package_manager import PackageManager
+from eos.logging.batch_error_logger import batch_error, raise_batched_errors
+from eos.logging.logger import log
+from eos.utils.singleton import Singleton
+
+T = TypeVar("T")
+S = TypeVar("S") # Specification registry type
+
+
+@dataclass
+class PluginRegistryConfig:
+ spec_registry: S
+ base_class: type[T]
+ config_file_name: str
+ implementation_file_name: str
+ class_suffix: str
+ error_class: type[Exception]
+ directory_name: str
+
+
+class PluginRegistry(Generic[T, S], metaclass=Singleton):
+ """
+ A generic registry for dynamically discovering and managing plugin-like implementation classes.
+ Supports on-demand reloading of plugins.
+ """
+
+ def __init__(self, package_manager: PackageManager, config: PluginRegistryConfig):
+ self._package_manager = package_manager
+ self._config = config
+ self._plugin_types: dict[str, type[T]] = {}
+ self._plugin_modules: dict[str, str] = {} # Maps type_name to module path
+ self._plugin_locations: dict[str, tuple[str, Path]] = {} # Maps type_name to (package name, plugin dir)
+
+ self._load_plugin_modules()
+
+ def get_plugin_class_type(self, type_name: str) -> type[T]:
+ """
+ Get the plugin class type for the given type name.
+ """
+ if type_name in self._plugin_types:
+ return self._plugin_types[type_name]
+
+ raise self._config.error_class(f"Plugin implementation for '{type_name}' not found.")
+
+ def _load_plugin_modules(self) -> None:
+ self._plugin_types.clear()
+ self._plugin_modules.clear()
+ self._plugin_locations.clear()
+
+ for package in self._package_manager.get_all_packages():
+ directory = getattr(package, self._config.directory_name)
+
+ if not Path(directory).is_dir():
+ continue
+
+ for current_dir, _, files in os.walk(directory):
+ if self._config.config_file_name not in files:
+ continue
+
+ dir_path = Path(current_dir).relative_to(Path(directory))
+
+ implementation_file = Path(current_dir) / self._config.implementation_file_name
+
+ self._load_single_plugin(package.name, dir_path, implementation_file)
+
+ raise_batched_errors(root_exception_type=self._config.error_class)
+
+ def _load_single_plugin(self, package_name: str, dir_path: Path, implementation_file: Path) -> None:
+ module_name = Path(dir_path).name
+ spec = importlib.util.spec_from_file_location(module_name, implementation_file)
+ module = importlib.util.module_from_spec(spec)
+ spec.loader.exec_module(module)
+
+ found_implementation = False
+ for name, obj in module.__dict__.items():
+ if inspect.isclass(obj) and obj is not self._config.base_class and name.endswith(self._config.class_suffix):
+ type_name = self._config.spec_registry.get_spec_by_dir(Path(package_name) / dir_path)
+ self._plugin_types[type_name] = obj
+ self._plugin_modules[type_name] = implementation_file
+ self._plugin_locations[type_name] = (package_name, dir_path)
+ found_implementation = True
+ log.debug(
+ f"Loaded {self._config.class_suffix.lower()} plugin '{name}' for type '{type_name}' from package "
+ f"'{package_name}'"
+ )
+ break
+
+ if not found_implementation:
+ batch_error(
+ f"{self._config.class_suffix} plugin for '{module_name}' in package '{package_name}' not found."
+ f" Make sure that its name ends in '{self._config.class_suffix}'.",
+ self._config.error_class,
+ )
+
+ def reload_plugin(self, type_name: str) -> None:
+ """
+ Reload a specific plugin by its type name.
+ """
+ if type_name not in self._plugin_modules:
+ raise self._config.error_class(f"Plugin '{type_name}' not found.")
+
+ implementation_file = self._plugin_modules[type_name]
+ # Use the location recorded at load time; deriving it from the file path is unreliable.
+ package_name, dir_path = self._plugin_locations[type_name]
+
+ self._load_single_plugin(package_name, dir_path, implementation_file)
+ log.info(f"Reloaded plugin '{type_name}'")
+
+ def reload_all_plugins(self) -> None:
+ """
+ Reload all plugins.
+ """
+ self._load_plugin_modules()
+ log.info("Reloaded all plugins")
diff --git a/eos/configuration/plugin_registries/task_plugin_registry.py b/eos/configuration/plugin_registries/task_plugin_registry.py
new file mode 100644
index 0000000..842fc05
--- /dev/null
+++ b/eos/configuration/plugin_registries/task_plugin_registry.py
@@ -0,0 +1,23 @@
+from eos.configuration.constants import TASK_CONFIG_FILE_NAME, TASK_IMPLEMENTATION_FILE_NAME
+from eos.configuration.exceptions import EosTaskHandlerClassNotFoundError
+from eos.configuration.package_manager import PackageManager
+from eos.configuration.plugin_registries.plugin_registry import PluginRegistry, PluginRegistryConfig
+from eos.configuration.spec_registries.task_specification_registry import TaskSpecificationRegistry
+from eos.tasks.base_task import BaseTask
+
+
+class TaskPluginRegistry(PluginRegistry[BaseTask, TaskSpecificationRegistry]):
+ def __init__(self, package_manager: PackageManager):
+ config = PluginRegistryConfig(
+ spec_registry=TaskSpecificationRegistry(),
+ base_class=BaseTask,
+ config_file_name=TASK_CONFIG_FILE_NAME,
+ implementation_file_name=TASK_IMPLEMENTATION_FILE_NAME,
+ class_suffix="Task",
+ error_class=EosTaskHandlerClassNotFoundError,
+ directory_name="tasks_dir",
+ )
+ super().__init__(package_manager, config)
+
+ def get_task_class_type(self, task_type: str) -> type[BaseTask]:
+ return self.get_plugin_class_type(task_type)
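+
+
+# Illustrative sketch: a task implementation this registry would discover. The
+# file tasks/<task_dir>/task.py must define a BaseTask subclass whose name ends
+# in the "Task" suffix (the class body here is hypothetical):
+#
+# class MagneticMixingTask(BaseTask):
+#     ...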
diff --git a/eos/configuration/spec_registries/__init__.py b/eos/configuration/spec_registries/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/eos/configuration/spec_registries/device_specification_registry.py b/eos/configuration/spec_registries/device_specification_registry.py
new file mode 100644
index 0000000..e69fdb6
--- /dev/null
+++ b/eos/configuration/spec_registries/device_specification_registry.py
@@ -0,0 +1,9 @@
+from eos.configuration.entities.device_specification import DeviceSpecification
+from eos.configuration.entities.lab import LabDeviceConfig
+from eos.configuration.spec_registries.specification_registry import SpecificationRegistry
+
+
+class DeviceSpecificationRegistry(SpecificationRegistry[DeviceSpecification, LabDeviceConfig]):
+ """
+ The device specification registry stores the specifications for all devices that are available in EOS.
+ """
diff --git a/eos/configuration/spec_registries/specification_registry.py b/eos/configuration/spec_registries/specification_registry.py
new file mode 100644
index 0000000..d9ee45c
--- /dev/null
+++ b/eos/configuration/spec_registries/specification_registry.py
@@ -0,0 +1,38 @@
+from pathlib import Path
+from typing import Generic, TypeVar
+
+from eos.utils.singleton import Singleton
+
+T = TypeVar("T") # Specification type
+C = TypeVar("C") # Configuration type
+
+
+class SpecificationRegistry(Generic[T, C], metaclass=Singleton):
+ """
+ A generic registry for storing and retrieving specifications.
+ """
+
+ def __init__(
+ self,
+ specifications: dict[str, T],
+ dirs_to_types: dict[str, str],
+ ):
+ self._specifications = specifications.copy()
+ self._dirs_to_types = dirs_to_types.copy()
+
+ def get_all_specs(self) -> dict[str, T]:
+ return self._specifications
+
+ def get_spec_by_type(self, spec_type: str) -> T | None:
+ return self._specifications.get(spec_type)
+
+ def get_spec_by_config(self, config: C) -> T | None:
+ return self._specifications.get(config.type)
+
+ def get_spec_by_dir(self, dir_path: str | Path) -> str | None:
+ # Callers may pass a Path; normalize to str so the lookup matches the string keys.
+ return self._dirs_to_types.get(str(dir_path))
+
+ def spec_exists_by_config(self, config: C) -> bool:
+ return config.type in self._specifications
+
+ def spec_exists_by_type(self, spec_type: str) -> bool:
+ return spec_type in self._specifications
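+
+# Minimal sketch of the lookup API (illustrative names; the registry is a
+# singleton, so later constructions return the first instance):
+#
+#   registry = SpecificationRegistry({"MyTask": my_spec}, {"common/tasks/my_task": "MyTask"})
+#   registry.get_spec_by_type("MyTask")               # -> my_spec
+#   registry.get_spec_by_dir("common/tasks/my_task")  # -> "MyTask"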
diff --git a/eos/configuration/spec_registries/task_specification_registry.py b/eos/configuration/spec_registries/task_specification_registry.py
new file mode 100644
index 0000000..9b9c160
--- /dev/null
+++ b/eos/configuration/spec_registries/task_specification_registry.py
@@ -0,0 +1,24 @@
+from eos.configuration.entities.task import TaskConfig
+from eos.configuration.entities.task_specification import TaskSpecification
+from eos.configuration.spec_registries.specification_registry import SpecificationRegistry
+
+
+class TaskSpecificationRegistry(SpecificationRegistry[TaskSpecification, TaskConfig]):
+ """
+ The task specification registry stores the specifications for all tasks that are available in EOS.
+ """
+
+ def __init__(
+ self,
+ task_specifications: dict[str, TaskSpecification],
+ task_dirs_to_task_types: dict[str, str],
+ ):
+ updated_specs = self._update_output_containers(task_specifications)
+ super().__init__(updated_specs, task_dirs_to_task_types)
+
+ @staticmethod
+ def _update_output_containers(specs: dict[str, TaskSpecification]) -> dict[str, TaskSpecification]:
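+ # If a task declares no output containers, assume its input containers pass
+ # through unchanged and reuse them as the outputs.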
+ for spec in specs.values():
+ if not spec.output_containers:
+ spec.output_containers = spec.input_containers.copy()
+ return specs
diff --git a/eos/configuration/validation/__init__.py b/eos/configuration/validation/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/eos/configuration/validation/container_registry.py b/eos/configuration/validation/container_registry.py
new file mode 100644
index 0000000..f27d6c5
--- /dev/null
+++ b/eos/configuration/validation/container_registry.py
@@ -0,0 +1,28 @@
+from eos.configuration.entities.experiment import (
+ ExperimentConfig,
+)
+from eos.configuration.entities.lab import (
+ LabConfig,
+ LabContainerConfig,
+)
+
+
+class ContainerRegistry:
+ """
+ The container registry stores information about the containers in labs.
+ """
+
+ def __init__(self, experiment_config: ExperimentConfig, lab_configs: list[LabConfig]):
+ self._experiment_config = experiment_config
+ self._lab_configs = [lab for lab in lab_configs if lab.type in self._experiment_config.labs]
+
+ def find_container_by_id(self, container_id: str) -> LabContainerConfig | None:
+ """
+ Find a container in the lab by its id.
+ """
+ for lab in self._lab_configs:
+ for container in lab.containers:
+ if container_id in container.ids:
+ return container
+
+ return None
diff --git a/eos/configuration/validation/container_validator.py b/eos/configuration/validation/container_validator.py
new file mode 100644
index 0000000..b026a79
--- /dev/null
+++ b/eos/configuration/validation/container_validator.py
@@ -0,0 +1,36 @@
+from eos.configuration.entities.experiment import (
+ ExperimentConfig,
+ ExperimentContainerConfig,
+)
+from eos.configuration.entities.lab import LabConfig
+from eos.configuration.exceptions import EosContainerConfigurationError
+from eos.configuration.validation.container_registry import ContainerRegistry
+
+
+class ExperimentContainerValidator:
+ """
+ Validate the containers of an experiment.
+ """
+
+ def __init__(self, experiment_config: ExperimentConfig, lab_configs: list[LabConfig]):
+ self._experiment_config = experiment_config
+ self._lab_configs = lab_configs
+
+ self._container_registry = ContainerRegistry(experiment_config, lab_configs)
+
+ def validate(self) -> None:
+ self._validate_containers()
+
+ def _validate_containers(self) -> None:
+ if not self._experiment_config.containers:
+ return
+ for container in self._experiment_config.containers:
+ self._validate_container_exists(container)
+
+ def _validate_container_exists(self, container: ExperimentContainerConfig) -> None:
+ for lab in self._lab_configs:
+ for lab_container in lab.containers:
+ if container.id in lab_container.ids:
+ return
+
+ raise EosContainerConfigurationError(f"Container '{container.id}' does not exist.")
diff --git a/eos/configuration/validation/experiment_validator.py b/eos/configuration/validation/experiment_validator.py
new file mode 100644
index 0000000..15d99ce
--- /dev/null
+++ b/eos/configuration/validation/experiment_validator.py
@@ -0,0 +1,39 @@
+from eos.configuration.entities.experiment import ExperimentConfig
+from eos.configuration.entities.lab import LabConfig
+from eos.configuration.exceptions import EosExperimentConfigurationError
+from eos.configuration.validation.container_validator import (
+ ExperimentContainerValidator,
+)
+
+from eos.configuration.validation.task_sequence_validator import (
+ TaskSequenceValidator,
+)
+
+
+class ExperimentValidator:
+ def __init__(
+ self,
+ experiment_config: ExperimentConfig,
+ lab_configs: list[LabConfig],
+ ):
+ self._experiment_config = experiment_config
+ self._lab_configs = lab_configs
+
+ def validate(self) -> None:
+ self._validate_labs()
+ ExperimentContainerValidator(self._experiment_config, self._lab_configs).validate()
+ TaskSequenceValidator(self._experiment_config, self._lab_configs).validate()
+
+ def _validate_labs(self) -> None:
+ lab_types = [lab.type for lab in self._lab_configs]
+ invalid_labs = []
+ for lab in self._experiment_config.labs:
+ if lab not in lab_types:
+ invalid_labs.append(lab)
+
+ if invalid_labs:
+ invalid_labs_str = "\n ".join(invalid_labs)
+ raise EosExperimentConfigurationError(
+ f"The following labs required by experiment '{self._experiment_config.type}' do not exist:"
+ f"\n {invalid_labs_str}"
+ )
diff --git a/eos/configuration/validation/lab_validator.py b/eos/configuration/validation/lab_validator.py
new file mode 100644
index 0000000..e5ae2e1
--- /dev/null
+++ b/eos/configuration/validation/lab_validator.py
@@ -0,0 +1,164 @@
+from pathlib import Path
+
+from eos.configuration.constants import LABS_DIR, EOS_COMPUTER_NAME
+from eos.configuration.entities.lab import LabConfig
+from eos.configuration.exceptions import EosLabConfigurationError
+from eos.configuration.spec_registries.device_specification_registry import DeviceSpecificationRegistry
+from eos.configuration.spec_registries.task_specification_registry import (
+ TaskSpecificationRegistry,
+)
+from eos.logging.batch_error_logger import batch_error, raise_batched_errors
+
+
+class LabValidator:
+ """
+ Validates the configuration of a lab. It validates the locations, devices, and containers defined in the
+ lab configuration.
+ """
+
+ def __init__(self, config_dir: str, lab_config: LabConfig):
+ self._lab_config = lab_config
+ self._lab_config_dir = Path(config_dir) / LABS_DIR / lab_config.type.lower()
+ self._tasks = TaskSpecificationRegistry()
+ self._devices = DeviceSpecificationRegistry()
+
+ def validate(self) -> None:
+ self._validate_lab_folder_name_matches_lab_type()
+ self._validate_locations()
+ self._validate_computers()
+ self._validate_devices()
+ self._validate_containers()
+
+ def _validate_locations(self) -> None:
+ self._validate_device_locations()
+ self._validate_container_locations()
+
+ def _validate_lab_folder_name_matches_lab_type(self) -> None:
+ if Path(self._lab_config_dir).name != self._lab_config.type:
+ raise EosLabConfigurationError(
+ f"Lab folder name '{Path(self._lab_config_dir).name}' does not match lab type "
+ f"'{self._lab_config.type}'."
+ )
+
+ def _validate_device_locations(self) -> None:
+ locations = self._lab_config.locations
+ for device_name, device in self._lab_config.devices.items():
+ if device.location and device.location not in locations:
+ batch_error(
+ f"Device '{device_name}' has invalid location '{device.location}'.",
+ EosLabConfigurationError,
+ )
+ raise_batched_errors(EosLabConfigurationError)
+
+ def _validate_container_locations(self) -> None:
+ locations = self._lab_config.locations
+ for container in self._lab_config.containers:
+ if container.location not in locations:
+ raise EosLabConfigurationError(
+ f"Container of type '{container.type}' has invalid location '{container.location}'."
+ )
+
+ def _validate_computers(self) -> None:
+ self._validate_computer_unique_ips()
+ self._validate_eos_computer_not_specified()
+
+ def _validate_computer_unique_ips(self) -> None:
+ ip_addresses = set()
+
+ for computer_name, computer in self._lab_config.computers.items():
+ if computer.ip in ip_addresses:
+ batch_error(
+ f"Computer '{computer_name}' has a duplicate IP address '{computer.ip}'.",
+ EosLabConfigurationError,
+ )
+ ip_addresses.add(computer.ip)
+
+ raise_batched_errors(EosLabConfigurationError)
+
+ def _validate_eos_computer_not_specified(self) -> None:
+ for computer_name, computer in self._lab_config.computers.items():
+ if computer_name.lower() == EOS_COMPUTER_NAME:
+ batch_error(
+ "Computer name 'eos_computer' is reserved and cannot be used.",
+ EosLabConfigurationError,
+ )
+ if computer.ip in ["127.0.0.1", "localhost"]:
+ batch_error(
+ f"Computer '{computer_name}' cannot use the reserved IP '127.0.0.1' or 'localhost'.",
+ EosLabConfigurationError,
+ )
+ raise_batched_errors(EosLabConfigurationError)
+
+ def _validate_devices(self) -> None:
+ self._validate_devices_have_computers()
+ self._validate_device_initialization_parameters()
+
+ def _validate_devices_have_computers(self) -> None:
+ for device_name, device in self._lab_config.devices.items():
+ if device.computer.lower() == EOS_COMPUTER_NAME:
+ continue
+ if device.computer not in self._lab_config.computers:
+ batch_error(
+ f"Device '{device_name}' has invalid computer '{device.computer}'.",
+ EosLabConfigurationError,
+ )
+ raise_batched_errors(EosLabConfigurationError)
+
+ def _validate_device_initialization_parameters(self) -> None:
+ for device_name, device in self._lab_config.devices.items():
+ device_spec = self._devices.get_spec_by_config(device)
+ if not device_spec:
+ batch_error(
+ f"No specification found for device type '{device.type}' of device '{device_name}'.",
+ EosLabConfigurationError,
+ )
+ continue
+
+ if device.initialization_parameters:
+ spec_params = device_spec.initialization_parameters or {}
+ for param_name in device.initialization_parameters:
+ if param_name not in spec_params:
+ batch_error(
+ f"Invalid initialization parameter '{param_name}' for device '{device_name}' "
+ f"of type '{device.type}' in lab type '{self._lab_config.type}'. "
+ f"Valid parameters are: {', '.join(spec_params.keys())}",
+ EosLabConfigurationError,
+ )
+
+ raise_batched_errors(EosLabConfigurationError)
+
+ def _validate_containers(self) -> None:
+ self._validate_container_unique_types()
+ self._validate_container_unique_ids()
+
+ def _validate_container_unique_types(self) -> None:
+ container_types = [container.type for container in self._lab_config.containers]
+
+ unique_container_types = set(container_types)
+
+ for container_type in unique_container_types:
+ if container_types.count(container_type) > 1:
+ batch_error(
+ f"Container type '{container_type}' already defined."
+ f" Please add more ids to the existing container definition.",
+ EosLabConfigurationError,
+ )
+ raise_batched_errors(EosLabConfigurationError)
+
+ def _validate_container_unique_ids(self) -> None:
+ container_ids = set()
+ duplicate_ids = set()
+ for container in self._lab_config.containers:
+ for container_id in container.ids:
+ if container_id in container_ids:
+ duplicate_ids.add(container_id)
+ else:
+ container_ids.add(container_id)
+
+ if duplicate_ids:
+ duplicate_ids_str = "\n ".join(duplicate_ids)
+ raise EosLabConfigurationError(
+ f"Containers must have unique IDs. The following are not unique:\n {duplicate_ids_str}"
+ )
diff --git a/eos/configuration/validation/multi_lab_validator.py b/eos/configuration/validation/multi_lab_validator.py
new file mode 100644
index 0000000..2d9523d
--- /dev/null
+++ b/eos/configuration/validation/multi_lab_validator.py
@@ -0,0 +1,51 @@
+from collections import defaultdict
+
+from eos.configuration.entities.lab import LabConfig
+from eos.configuration.exceptions import EosLabConfigurationError
+
+
+class MultiLabValidator:
+ """
+ Cross-checks all lab configurations. Validates that computer IPs and container IDs are globally unique across labs.
+ """
+
+ def __init__(self, lab_configs: list[LabConfig]):
+ self._lab_configs = lab_configs
+
+ def validate(self) -> None:
+ self._validate_computer_ips_globally_unique()
+ self._validate_container_ids_globally_unique()
+
+ def _validate_computer_ips_globally_unique(self) -> None:
+ computer_ips = defaultdict(list)
+
+ for lab in self._lab_configs:
+ for computer in lab.computers.values():
+ computer_ips[computer.ip].append(lab.type)
+
+ duplicate_ips = {ip: labs for ip, labs in computer_ips.items() if len(labs) > 1}
+
+ if duplicate_ips:
+ duplicate_ips_str = "\n ".join(
+ f"'{ip}': defined in labs {', '.join(labs)}" for ip, labs in duplicate_ips.items()
+ )
+ raise EosLabConfigurationError(
+ f"The following computer IPs are not globally unique:\n {duplicate_ips_str}"
+ )
+
+ def _validate_container_ids_globally_unique(self) -> None:
+ container_ids = defaultdict(list)
+ for lab in self._lab_configs:
+ for container in lab.containers:
+ for container_id in container.ids:
+ container_ids[container_id].append(lab.type)
+
+ duplicate_ids = {container_id: labs for container_id, labs in container_ids.items() if len(labs) > 1}
+
+ if duplicate_ids:
+ duplicate_ids_str = "\n ".join(
+ f"'{container_id}': defined in labs {', '.join(labs)}" for container_id, labs in duplicate_ids.items()
+ )
+ raise EosLabConfigurationError(
+ f"The following container IDs are not globally unique:\n {duplicate_ids_str}"
+ )
diff --git a/eos/configuration/validation/task_sequence/__init__.py b/eos/configuration/validation/task_sequence/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/eos/configuration/validation/task_sequence/base_task_sequence_validator.py b/eos/configuration/validation/task_sequence/base_task_sequence_validator.py
new file mode 100644
index 0000000..27b2551
--- /dev/null
+++ b/eos/configuration/validation/task_sequence/base_task_sequence_validator.py
@@ -0,0 +1,28 @@
+from abc import ABC, abstractmethod
+
+from eos.configuration.entities.experiment import (
+ ExperimentConfig,
+)
+from eos.configuration.entities.lab import LabConfig
+from eos.configuration.entities.task import TaskConfig
+from eos.configuration.spec_registries.task_specification_registry import (
+ TaskSpecificationRegistry,
+)
+
+
+class BaseTaskSequenceValidator(ABC):
+ def __init__(
+ self,
+ experiment_config: ExperimentConfig,
+ lab_configs: list[LabConfig],
+ ):
+ self._experiment_config = experiment_config
+ self._lab_configs = lab_configs
+ self._tasks = TaskSpecificationRegistry()
+
+ @abstractmethod
+ def validate(self) -> None:
+ pass
+
+ def _find_task_by_id(self, task_id: str) -> TaskConfig | None:
+ return next((task for task in self._experiment_config.tasks if task.id == task_id), None)
diff --git a/eos/configuration/validation/task_sequence/task_input_container_validator.py b/eos/configuration/validation/task_sequence/task_input_container_validator.py
new file mode 100644
index 0000000..1832e49
--- /dev/null
+++ b/eos/configuration/validation/task_sequence/task_input_container_validator.py
@@ -0,0 +1,113 @@
+from eos.configuration.entities.lab import LabContainerConfig
+from eos.configuration.entities.task import TaskConfig
+from eos.configuration.entities.task_specification import TaskSpecification, TaskSpecificationContainer
+from eos.configuration.exceptions import EosTaskValidationError
+from eos.configuration.validation import validation_utils
+from eos.configuration.validation.container_registry import ContainerRegistry
+from eos.logging.batch_error_logger import batch_error, raise_batched_errors
+
+
+class TaskInputContainerValidator:
+ """
+ Validates that the input containers of a task conform to the task's specification.
+ """
+
+ def __init__(
+ self,
+ task: TaskConfig,
+ task_spec: TaskSpecification,
+ container_registry: ContainerRegistry,
+ ):
+ self._task_id = task.id
+ self._input_containers = task.containers
+ self._task_spec = task_spec
+ self._container_registry = container_registry
+
+ def validate_input_containers(self) -> None:
+ """
+ Validate the input containers of a task.
+ Check whether the types of containers match the task's requirements and whether the quantities are correct.
+ """
+ self._validate_input_container_requirements()
+ raise_batched_errors(root_exception_type=EosTaskValidationError)
+
+ def _validate_input_container_requirements(self) -> None:
+ """
+ Validate that the input containers of a task meet its requirements in terms of types and quantities.
+ """
+ required_containers = self._get_required_containers()
+ provided_containers = self._get_provided_containers()
+
+ self._validate_container_counts(required_containers, provided_containers)
+ self._validate_container_types(required_containers, provided_containers)
+
+ def _get_required_containers(self) -> dict[str, TaskSpecificationContainer]:
+ """
+ Get the required containers as specified in the task specification.
+ """
+ return self._task_spec.input_containers
+
+ def _get_provided_containers(self) -> dict[str, str]:
+ """
+ Get the provided containers, validating their existence if not a reference.
+ """
+ provided_containers = {}
+ for container_name, container_id in self._input_containers.items():
+ if validation_utils.is_container_reference(container_id):
+ provided_containers[container_name] = "reference"
+ else:
+ lab_container = self._validate_container_exists(container_id)
+ # A missing container is batch-logged rather than raised immediately, so
+ # guard against None before reading its type.
+ if lab_container:
+ provided_containers[container_name] = lab_container.type
+ return provided_containers
+
+ def _validate_container_exists(self, container_id: str) -> LabContainerConfig | None:
+ """
+ Validate the existence of a container in the lab.
+ """
+ container = self._container_registry.find_container_by_id(container_id)
+
+ if not container:
+ batch_error(
+ f"Container '{container_id}' in task '{self._task_id}' does not exist in the lab.",
+ EosTaskValidationError,
+ )
+
+ return container
+
+ def _validate_container_counts(
+ self, required: dict[str, TaskSpecificationContainer], provided: dict[str, str]
+ ) -> None:
+ """
+ Validate that the total number of containers matches the requirements.
+ """
+ if len(provided) != len(required):
+ batch_error(
+ f"Task '{self._task_id}' requires {len(required)} container(s) but {len(provided)} were provided.",
+ EosTaskValidationError,
+ )
+
+ def _validate_container_types(
+ self, required: dict[str, TaskSpecificationContainer], provided: dict[str, str]
+ ) -> None:
+ """
+ Validate that the types of non-reference containers match the requirements.
+ """
+ for container_name, container_spec in required.items():
+ if container_name not in provided:
+ batch_error(
+ f"Required container '{container_name}' not provided for task '{self._task_id}'.",
+ EosTaskValidationError,
+ )
+ elif provided[container_name] != "reference" and provided[container_name] != container_spec.type:
+ batch_error(
+ f"Container '{container_name}' in task '{self._task_id}' has incorrect type. "
+ f"Expected '{container_spec.type}' but got '{provided[container_name]}'.",
+ EosTaskValidationError,
+ )
+
+ for container_name in provided:
+ if container_name not in required:
+ batch_error(
+ f"Unexpected container '{container_name}' provided for task '{self._task_id}'.",
+ EosTaskValidationError,
+ )
diff --git a/eos/configuration/validation/task_sequence/task_input_parameter_validator.py b/eos/configuration/validation/task_sequence/task_input_parameter_validator.py
new file mode 100644
index 0000000..3ea6bf2
--- /dev/null
+++ b/eos/configuration/validation/task_sequence/task_input_parameter_validator.py
@@ -0,0 +1,139 @@
+import copy
+from typing import Any
+
+from omegaconf import DictConfig, ListConfig, OmegaConf
+
+from eos.configuration.entities.parameters import ParameterType, ParameterFactory
+from eos.configuration.entities.task import TaskConfig
+from eos.configuration.entities.task_specification import TaskSpecification
+from eos.configuration.exceptions import (
+ EosTaskValidationError,
+ EosConfigurationError,
+)
+from eos.configuration.validation import validation_utils
+from eos.logging.batch_error_logger import batch_error, raise_batched_errors
+
+
+class TaskInputParameterValidator:
+ """
+ Validates that the input parameters of a task conform to the task's specification.
+ """
+
+ def __init__(self, task: TaskConfig, task_spec: TaskSpecification):
+ self._task_id = task.id
+ self._input_parameters = task.parameters
+ self._task_spec = task_spec
+
+ def validate_input_parameters(self, allow_non_concrete_parameters: bool = True) -> None:
+ """
+ Validate the input parameters of a task.
+ Ensure that all required parameters are provided and that the provided parameters conform to the task's
+ specification.
+
+ :param allow_non_concrete_parameters: Whether to allow non-concrete parameters (references or dynamic parameters).
+ """
+ for parameter_name in self._input_parameters:
+ self._validate_parameter_in_task_spec(parameter_name)
+ raise_batched_errors(root_exception_type=EosTaskValidationError)
+
+ self._validate_all_required_parameters_provided()
+
+ for parameter_name, parameter in self._input_parameters.items():
+ if allow_non_concrete_parameters:
+ self._validate_parameter(parameter_name, parameter)
+ else:
+ self._validate_concrete_parameter(parameter_name, parameter)
+ raise_batched_errors(root_exception_type=EosTaskValidationError)
+
+ def _validate_parameter_in_task_spec(self, parameter_name: str) -> None:
+ """
+ Check that the parameter exists in the task specification.
+ """
+ if parameter_name not in self._task_spec.input_parameters:
+ batch_error(
+ f"Parameter '{parameter_name}' in task '{self._task_id}' is invalid. "
+ f"Expected a parameter found in the task specification.",
+ EosTaskValidationError,
+ )
+
+ def _validate_parameter(self, parameter_name: str, parameter: Any) -> None:
+ """
+ Validate a parameter according to the task specification. Ignore parameter references and special parameters.
+ """
+ if validation_utils.is_parameter_reference(parameter) or validation_utils.is_dynamic_parameter(parameter):
+ return
+
+ self._validate_parameter_spec(parameter_name, parameter)
+
+ def _validate_concrete_parameter(self, parameter_name: str, parameter: Any) -> None:
+ """
+ Validate a parameter according to the task specification. Expect that the parameter is concrete.
+ """
+ if validation_utils.is_parameter_reference(parameter):
+ batch_error(
+ f"Input parameter '{parameter_name}' in task '{self._task_id}' is a parameter reference, which is not "
+ f"allowed.",
+ EosTaskValidationError,
+ )
+ elif validation_utils.is_dynamic_parameter(parameter):
+ batch_error(
+ f"Input parameter '{parameter_name}' in task '{self._task_id}' is 'eos_dynamic', which is not "
+ f"allowed.",
+ EosTaskValidationError,
+ )
+ else:
+ self._validate_parameter_spec(parameter_name, parameter)
+
+ def _validate_parameter_spec(self, parameter_name: str, parameter: Any) -> None:
+ """
+ Validate a parameter to make sure it conforms to its task specification.
+ """
+ parameter_spec = copy.deepcopy(self._task_spec.input_parameters[parameter_name])
+
+ if isinstance(parameter, ListConfig | DictConfig):
+ parameter = OmegaConf.to_object(parameter)
+
+ if not isinstance(parameter, ParameterType(parameter_spec.type).python_type()):
+ batch_error(
+ f"Parameter '{parameter_name}' in task '{self._task_id}' has incorrect type {type(parameter)}. "
+ f"Expected type: '{parameter_spec.type}'.",
+ EosTaskValidationError,
+ )
+ return
+
+ parameter_spec["value"] = parameter
+
+ try:
+ parameter_type = ParameterType(parameter_spec.type)
+ ParameterFactory.create_parameter(parameter_type, **parameter_spec)
+ except EosConfigurationError as e:
+ batch_error(
+ f"Parameter '{parameter_name}' in task '{self._task_id}' validation error: {e}",
+ EosTaskValidationError,
+ )
+
+ def _validate_all_required_parameters_provided(self) -> None:
+ """
+ Validate that all required parameters are provided in the parameter dictionary.
+ """
+ missing_parameters = self._get_missing_required_task_parameters()
+
+ if missing_parameters:
+ raise EosTaskValidationError(
+ f"Task '{self._task_id}' is missing required input parameters: {missing_parameters}"
+ )
+
+ def _get_missing_required_task_parameters(self) -> list[str]:
+ """
+ Get all the missing required parameters in the parameter dictionary.
+ """
+ required_parameters = self._get_required_input_parameters()
+ return [
+ parameter_name for parameter_name in required_parameters if parameter_name not in self._input_parameters
+ ]
+
+ def _get_required_input_parameters(self) -> list[str]:
+ """
+ Get all the required input parameters for the task.
+ """
+ return [param for param, spec in self._task_spec.input_parameters.items() if "value" not in spec]
diff --git a/eos/configuration/validation/task_sequence/task_sequence_input_container_validator.py b/eos/configuration/validation/task_sequence/task_sequence_input_container_validator.py
new file mode 100644
index 0000000..84bf328
--- /dev/null
+++ b/eos/configuration/validation/task_sequence/task_sequence_input_container_validator.py
@@ -0,0 +1,92 @@
+from eos.configuration.entities.experiment import ExperimentConfig
+from eos.configuration.entities.lab import LabConfig
+from eos.configuration.entities.task import TaskConfig
+from eos.configuration.exceptions import EosTaskValidationError
+from eos.configuration.validation import validation_utils
+from eos.configuration.validation.container_registry import ContainerRegistry
+from eos.configuration.validation.task_sequence.base_task_sequence_validator import BaseTaskSequenceValidator
+from eos.configuration.validation.task_sequence.task_input_container_validator import TaskInputContainerValidator
+
+
+class TaskSequenceInputContainerValidator(BaseTaskSequenceValidator):
+ """
+ Validate the input containers of every task in a task sequence.
+ """
+
+ def __init__(
+ self,
+ experiment_config: ExperimentConfig,
+ lab_configs: list[LabConfig],
+ ):
+ super().__init__(experiment_config, lab_configs)
+ self._container_registry = ContainerRegistry(experiment_config, lab_configs)
+
+ def validate(self) -> None:
+ for task in self._experiment_config.tasks:
+ self._validate_input_containers(task)
+
+ def _validate_input_containers(
+ self,
+ task: TaskConfig,
+ ) -> None:
+ """
+ Validate that a task gets the types and quantities of input containers it requires.
+ """
+ task_spec = self._tasks.get_spec_by_config(task)
+ if not task.containers and task_spec.input_containers:
+ raise EosTaskValidationError(f"Task '{task.id}' requires input containers but none were provided.")
+
+ input_container_validator = TaskInputContainerValidator(task, task_spec, self._container_registry)
+ input_container_validator.validate_input_containers()
+
+ self._validate_container_references(task)
+
+ def _validate_container_references(self, task: TaskConfig) -> None:
+ for container_name, container_id in task.containers.items():
+ if validation_utils.is_container_reference(container_id):
+ self._validate_container_reference(container_name, container_id, task)
+
+ def _validate_container_reference(
+ self,
+ container_name: str,
+ container_id: str,
+ task: TaskConfig,
+ ) -> None:
+ """
+ Ensure that a container reference is valid and that it conforms to the container specification.
+ """
+ referenced_task_id, referenced_container = container_id.split(".")
+
+ referenced_task = self._find_task_by_id(referenced_task_id)
+ if not referenced_task:
+ raise EosTaskValidationError(
+ f"Container '{container_name}' in task '{task.id}' references task '{referenced_task_id}' "
+ f"which does not exist."
+ )
+
+ referenced_task_spec = self._tasks.get_spec_by_config(referenced_task)
+
+ if referenced_container not in referenced_task_spec.output_containers:
+ raise EosTaskValidationError(
+ f"Container '{container_name}' in task '{task.id}' references container '{referenced_container}' "
+ f"which is not an output container of task '{referenced_task_id}'."
+ )
+
+ task_spec = self._tasks.get_spec_by_config(task)
+ if container_name not in task_spec.input_containers:
+ raise EosTaskValidationError(
+ f"Container '{container_name}' is not a valid input container for task '{task.id}'."
+ )
+
+ required_container_spec = task_spec.input_containers[container_name]
+ referenced_container_spec = referenced_task_spec.output_containers[referenced_container]
+
+ if required_container_spec.type != referenced_container_spec.type:
+ raise EosTaskValidationError(
+ f"Type mismatch for referenced container '{referenced_container}' in task '{task.id}'. "
+ f"The required container type is '{required_container_spec.type}' which does not match the referenced "
+ f"container type '{referenced_container_spec.type}'."
+ )
diff --git a/eos/configuration/validation/task_sequence/task_sequence_input_parameter_validator.py b/eos/configuration/validation/task_sequence/task_sequence_input_parameter_validator.py
new file mode 100644
index 0000000..19fa902
--- /dev/null
+++ b/eos/configuration/validation/task_sequence/task_sequence_input_parameter_validator.py
@@ -0,0 +1,95 @@
+from eos.configuration.entities.parameters import (
+ ParameterType,
+)
+from eos.configuration.entities.task import TaskConfig
+from eos.configuration.exceptions import (
+ EosTaskValidationError,
+)
+from eos.configuration.validation import validation_utils
+from eos.configuration.validation.task_sequence.base_task_sequence_validator import (
+ BaseTaskSequenceValidator,
+)
+from eos.configuration.validation.task_sequence.task_input_parameter_validator import (
+ TaskInputParameterValidator,
+)
+
+
+class TaskSequenceInputParameterValidator(BaseTaskSequenceValidator):
+ """
+ Validate the input parameters of every task in a task sequence.
+ """
+
+ def validate(self) -> None:
+ for task in self._experiment_config.tasks:
+ self._validate_input_parameters(task)
+
+ def _validate_input_parameters(self, task: TaskConfig) -> None:
+ task_spec = self._tasks.get_spec_by_config(task)
+
+ if task_spec.input_parameters is None and task.parameters is not None:
+ raise EosTaskValidationError(
+ f"Task '{task.id}' does not accept input parameters but parameters were provided."
+ )
+
+ parameter_validator = TaskInputParameterValidator(task, task_spec)
+ parameter_validator.validate_input_parameters()
+
+ self._validate_parameter_references(task)
+
+ def _validate_parameter_references(self, task: TaskConfig) -> None:
+ for parameter_name, parameter in task.parameters.items():
+ if validation_utils.is_parameter_reference(parameter):
+ self._validate_parameter_reference(parameter_name, task)
+
+ def _validate_parameter_reference(
+ self,
+ parameter_name: str,
+ task: TaskConfig,
+ ) -> None:
+ """
+ Ensure that a parameter reference is valid and that it conforms to the parameter specification.
+ """
+ parameter = task.parameters[parameter_name]
+ referenced_task_id, referenced_parameter = str(parameter).split(".")
+
+ referenced_task = self._find_task_by_id(referenced_task_id)
+ if not referenced_task:
+ raise EosTaskValidationError(
+ f"Parameter '{parameter_name}' in task '{task.id}' references task '{referenced_task_id}' "
+ f"which does not exist."
+ )
+
+ referenced_task_spec = self._tasks.get_spec_by_config(referenced_task)
+
+ referenced_parameter_spec = None
+ if (
+ referenced_task_spec.output_parameters
+ and referenced_parameter in referenced_task_spec.output_parameters
+ ):
+ referenced_parameter_spec = referenced_task_spec.output_parameters[referenced_parameter]
+ elif (
+ referenced_task_spec.input_parameters
+ and referenced_parameter in referenced_task_spec.input_parameters
+ ):
+ referenced_parameter_spec = referenced_task_spec.input_parameters[referenced_parameter]
+
+ if not referenced_parameter_spec:
+ raise EosTaskValidationError(
+ f"Parameter '{parameter_name}' in task '{task.id}' references parameter '{referenced_parameter}' "
+ f"which does not exist in task '{referenced_task_id}'."
+ )
+
+ task_spec = self._tasks.get_spec_by_config(task)
+ parameter_spec = task_spec.input_parameters[parameter_name]
+
+ if (
+ ParameterType(parameter_spec.type).python_type()
+ != ParameterType(referenced_parameter_spec.type).python_type()
+ ):
+ raise EosTaskValidationError(
+ f"Type mismatch for referenced parameter '{referenced_parameter}' in task '{task.id}'. "
+ f"The required parameter type is '{parameter_spec.type}' which does not match referenced the parameter "
+ f"type '{referenced_parameter_spec.type.value}'."
+ )
diff --git a/eos/configuration/validation/task_sequence_validator.py b/eos/configuration/validation/task_sequence_validator.py
new file mode 100644
index 0000000..45f6c8d
--- /dev/null
+++ b/eos/configuration/validation/task_sequence_validator.py
@@ -0,0 +1,111 @@
+import re
+from collections import Counter
+
+from eos.configuration.entities.experiment import ExperimentConfig
+from eos.configuration.entities.lab import LabConfig
+from eos.configuration.exceptions import EosTaskValidationError
+from eos.configuration.validation.task_sequence.base_task_sequence_validator import BaseTaskSequenceValidator
+from eos.configuration.validation.task_sequence.task_sequence_input_container_validator import (
+ TaskSequenceInputContainerValidator,
+)
+from eos.configuration.validation.task_sequence.task_sequence_input_parameter_validator import (
+ TaskSequenceInputParameterValidator,
+)
+from eos.logging.batch_error_logger import batch_error, raise_batched_errors
+
+
+class TaskSequenceValidator(BaseTaskSequenceValidator):
+ def __init__(
+ self,
+ experiment_config: ExperimentConfig,
+ lab_configs: list[LabConfig],
+ ):
+ super().__init__(experiment_config, lab_configs)
+ self._valid_task_id_pattern = re.compile("^[A-Za-z0-9_]+$")
+
+ def validate(self) -> None:
+ self._validate_tasks_exist()
+ self._validate_task_dependencies_exist()
+ self._validate_unique_task_ids()
+ self._validate_task_id_format()
+ self._validate_devices()
+ TaskSequenceInputContainerValidator(self._experiment_config, self._lab_configs).validate()
+ TaskSequenceInputParameterValidator(self._experiment_config, self._lab_configs).validate()
+
+ def _validate_tasks_exist(self) -> None:
+ for task in self._experiment_config.tasks:
+ if not self._tasks.spec_exists_by_config(task):
+ raise EosTaskValidationError(
+ f"Task '{task.id}' in experiment '{self._experiment_config.type}' does not exist."
+ )
+
+ def _validate_task_dependencies_exist(self) -> None:
+ for task in self._experiment_config.tasks:
+ for task_id in task.dependencies:
+ if not any(t.id == task_id for t in self._experiment_config.tasks):
+ raise EosTaskValidationError(
+ f"Dependency '{task_id}' of task '{task.id}' in experiment "
+ f"'{self._experiment_config.type}' does not exist."
+ )
+
+ def _validate_unique_task_ids(self) -> None:
+ task_ids = [task.id for task in self._experiment_config.tasks]
+ if len(task_ids) != len(set(task_ids)):
+ raise EosTaskValidationError("All task IDs in the task sequence must be unique.")
+
+ def _validate_task_id_format(self) -> None:
+ for task in self._experiment_config.tasks:
+ if not self._valid_task_id_pattern.match(task.id):
+ raise EosTaskValidationError(
+ f"Task ID '{task.id}' is invalid. Task IDs can only contain letters, numbers, "
+ f"and underscores, with no spaces."
+ )
+
+ def _validate_devices(self) -> None:
+ experiment_type = self._experiment_config.type
+
+ for task in self._experiment_config.tasks:
+ task_spec = self._tasks.get_spec_by_config(task)
+ required_device_types = Counter(task_spec.device_types or [])
+ provided_device_types = Counter()
+ used_devices = set()
+
+ for device in task.devices:
+ lab_id = device.lab_id
+ device_id = device.id
+ if device_id in used_devices:
+ batch_error(
+ f"Duplicate device '{device_id}' in lab '{lab_id}' requested by task '{task.id}' of experiment "
+ f"'{experiment_type}' is not allowed.",
+ EosTaskValidationError,
+ )
+ continue
+
+ lab_config = self._find_lab_by_id(lab_id)
+ if not lab_config or device_id not in lab_config.devices:
+ batch_error(
+ f"Device '{device_id}' in lab '{lab_id}' requested by task '{task.id}' of experiment "
+ f"{experiment_type} does not exist.",
+ EosTaskValidationError,
+ )
+ continue
+
+ device_type = lab_config.devices[device_id].type
+ provided_device_types[device_type] += 1
+ used_devices.add(device_id)
+
+ # Counter subtraction keeps only positive counts: any device types still
+ # missing after matching the provided devices
+ missing_device_types = required_device_types - provided_device_types
+ if missing_device_types:
+ missing_counts_str = ", ".join(
+ [f"\n{count}x '{device_type}'" for device_type, count in missing_device_types.items()]
+ )
+ batch_error(
+ f"Task '{task.id}' of experiment '{experiment_type}' does not have all required device types "
+ f"satisfied. Missing device types: {missing_counts_str}",
+ EosTaskValidationError,
+ )
+
+ raise_batched_errors(EosTaskValidationError)
+
+ def _find_lab_by_id(self, lab_id: str) -> LabConfig | None:
+ return next((lab for lab in self._lab_configs if lab.type == lab_id), None)
diff --git a/eos/configuration/validation/validation_utils.py b/eos/configuration/validation/validation_utils.py
new file mode 100644
index 0000000..58d4c12
--- /dev/null
+++ b/eos/configuration/validation/validation_utils.py
@@ -0,0 +1,33 @@
+from eos.configuration.entities.parameters import (
+ AllowedParameterTypes,
+)
+
+
+def is_parameter_reference(parameter: AllowedParameterTypes) -> bool:
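+ """
+ Check whether a parameter is a reference of the form '<task_id>.<parameter_name>',
+ e.g. 'mixing.speed' (an illustrative name).
+ """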
+ return (
+ isinstance(parameter, str)
+ and parameter.count(".") == 1
+ and all(component.strip() for component in parameter.split("."))
+ )
+
+
+def is_dynamic_parameter(parameter: AllowedParameterTypes) -> bool:
+ return isinstance(parameter, str) and parameter.lower() == "eos_dynamic"
+
+
+def is_dynamic_container(container_id: str) -> bool:
+ """
+ Check if the container ID is a dynamic container ID (eos_dynamic).
+ """
+ return isinstance(container_id, str) and container_id.lower() == "eos_dynamic"
+
+
+def is_container_reference(container_id: str) -> bool:
+ """
+ Check if the container ID is a reference.
+ """
+ return (
+ isinstance(container_id, str)
+ and container_id.count(".") == 1
+ and all(component.strip() for component in container_id.split("."))
+ )
diff --git a/eos/containers/__init__.py b/eos/containers/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/eos/containers/container_manager.py b/eos/containers/container_manager.py
new file mode 100644
index 0000000..b4e990f
--- /dev/null
+++ b/eos/containers/container_manager.py
@@ -0,0 +1,159 @@
+import threading
+from collections import defaultdict
+from typing import Any
+
+from eos.configuration.configuration_manager import ConfigurationManager
+from eos.containers.entities.container import Container
+from eos.containers.exceptions import EosContainerStateError
+from eos.containers.repositories.container_repository import ContainerRepository
+from eos.logging.logger import log
+from eos.persistence.db_manager import DbManager
+
+
+class ContainerManager:
+ """
+ The container manager provides methods for interacting with containers in a lab.
+ """
+
+ def __init__(self, configuration_manager: ConfigurationManager, db_manager: DbManager):
+ self._configuration_manager = configuration_manager
+
+ self._containers = ContainerRepository("containers", db_manager)
+ self._containers.create_indices([("id", 1)], unique=True)
+ self._locks = defaultdict(threading.RLock)
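+ # One reentrant lock per container ID, created lazily on first access.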
+
+ self._create_containers()
+ log.debug("Container manager initialized.")
+
+ def get_container(self, container_id: str) -> Container:
+ """
+ Get a copy of the container with the specified ID.
+ """
+ container = self._containers.get_one(id=container_id)
+
+ if container:
+ return Container(**container)
+
+ raise EosContainerStateError(f"Container '{container_id}' does not exist.")
+
+ def get_containers(self, **query: Any) -> list[Container]:
+ """
+ Query containers with arbitrary parameters.
+
+ :param query: Dictionary of query parameters.
+ """
+ containers = self._containers.get_all(**query)
+ return [Container(**container) for container in containers]
+
+ def set_location(self, container_id: str, location: str) -> None:
+ """
+ Set the location of a container.
+ """
+ with self._get_lock(container_id):
+ self._containers.update({"location": location}, id=container_id)
+
+ def set_lab(self, container_id: str, lab: str) -> None:
+ """
+ Set the lab of a container.
+ """
+ with self._get_lock(container_id):
+ self._containers.update({"lab": lab}, id=container_id)
+
+ def set_metadata(self, container_id: str, metadata: dict[str, Any]) -> None:
+ """
+ Set metadata for a container.
+ """
+ with self._get_lock(container_id):
+ self._containers.update({"metadata": metadata}, id=container_id)
+
+ def add_metadata(self, container_id: str, metadata: dict[str, Any]) -> None:
+ """
+ Add metadata to a container.
+ """
+ container = self.get_container(container_id)
+ container.metadata.update(metadata)
+
+ with self._get_lock(container_id):
+ self._containers.update({"metadata": container.metadata}, id=container_id)
+
+ def remove_metadata(self, container_id: str, metadata_keys: list[str]) -> None:
+ """
+ Remove metadata from a container.
+ """
+ container = self.get_container(container_id)
+ for key in metadata_keys:
+ container.metadata.pop(key, None)
+
+ with self._get_lock(container_id):
+ self._containers.update({"metadata": container.metadata}, id=container_id)
+
+ def update_container(self, container: Container) -> None:
+ """
+ Update a container in the database.
+ """
+ self._containers.update(container.model_dump(), id=container.id)
+
+ def update_containers(self, loaded_labs: set[str] | None = None, unloaded_labs: set[str] | None = None) -> None:
+ """
+ Update containers based on loaded and unloaded labs.
+ """
+ if unloaded_labs:
+ for lab_id in unloaded_labs:
+ self._remove_containers_for_lab(lab_id)
+
+ if loaded_labs:
+ for lab_id in loaded_labs:
+ self._create_containers_for_lab(lab_id)
+
+ log.debug("Containers have been updated.")
+
+ def _remove_containers_for_lab(self, lab_id: str) -> None:
+ """
+ Remove containers associated with an unloaded lab.
+ """
+ containers_to_remove = self.get_containers(lab=lab_id)
+ for container in containers_to_remove:
+ self._containers.delete(id=container.id)
+ log.debug(f"Removed containers for lab '{lab_id}'")
+
+ def _create_containers_for_lab(self, lab_id: str) -> None:
+ """
+ Create containers for a loaded lab.
+ """
+ lab_config = self._configuration_manager.labs[lab_id]
+ for container_config in lab_config.containers:
+ for container_id in container_config.ids:
+ existing_container = self._containers.get_one(id=container_id)
+ if not existing_container:
+ container = Container(
+ id=container_id,
+ type=container_config.type,
+ lab=lab_id,
+ location=container_config.location,
+ metadata=container_config.metadata,
+ )
+ self._containers.update(container.model_dump(), id=container_id)
+ log.debug(f"Created containers for lab '{lab_id}'")
+
+ def _create_containers(self) -> None:
+ """
+ Create containers from the lab configuration and add them to the database.
+ """
+ for lab_name, lab_config in self._configuration_manager.labs.items():
+ for container_config in lab_config.containers:
+ for container_id in container_config.ids:
+ container = Container(
+ id=container_id,
+ type=container_config.type,
+ lab=lab_name,
+ location=container_config.location,
+ metadata=container_config.metadata,
+ )
+ self._containers.update(container.model_dump(), id=container_id)
+ log.debug("Created containers")
+
+ def _get_lock(self, container_id: str) -> threading.RLock:
+ """
+ Get the lock for a specific container.
+ """
+ return self._locks[container_id]
diff --git a/eos/containers/entities/__init__.py b/eos/containers/entities/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/eos/containers/entities/container.py b/eos/containers/entities/container.py
new file mode 100644
index 0000000..8e30d0d
--- /dev/null
+++ b/eos/containers/entities/container.py
@@ -0,0 +1,16 @@
+from typing import Any
+
+from pydantic import BaseModel
+
+
+class Container(BaseModel):
+ id: str
+ type: str
+ lab: str
+
+ location: str
+
+ metadata: dict[str, Any] = {}
+
+ class Config:
+ arbitrary_types_allowed = True
diff --git a/eos/containers/exceptions.py b/eos/containers/exceptions.py
new file mode 100644
index 0000000..6745a5f
--- /dev/null
+++ b/eos/containers/exceptions.py
@@ -0,0 +1,6 @@
+class EosContainerError(Exception):
+ pass
+
+
+class EosContainerStateError(EosContainerError):
+ pass
diff --git a/eos/containers/repositories/__init__.py b/eos/containers/repositories/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/eos/containers/repositories/container_repository.py b/eos/containers/repositories/container_repository.py
new file mode 100644
index 0000000..cf3bb3a
--- /dev/null
+++ b/eos/containers/repositories/container_repository.py
@@ -0,0 +1,5 @@
+from eos.persistence.mongo_repository import MongoRepository
+
+
+class ContainerRepository(MongoRepository):
+ pass
diff --git a/eos/devices/__init__.py b/eos/devices/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/eos/devices/base_device.py b/eos/devices/base_device.py
new file mode 100644
index 0000000..ce9f48f
--- /dev/null
+++ b/eos/devices/base_device.py
@@ -0,0 +1,167 @@
+import functools
+import threading
+from abc import ABC, abstractmethod, ABCMeta
+from typing import Any
+
+from eos.devices.exceptions import (
+ EosDeviceInitializationError,
+ EosDeviceCleanupError,
+ EosDeviceError,
+)
+
+
+class DeviceStatus:
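+ """
+ Possible device states: a device starts DISABLED, becomes IDLE after
+ initialization, is BUSY while executing work, and is marked ERROR on failure.
+ """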
+ DISABLED = "DISABLED"
+ IDLE = "IDLE"
+ BUSY = "BUSY"
+ ERROR = "ERROR"
+
+
+def capture_exceptions(func: callable) -> callable:
+ # functools.wraps preserves the wrapped method's name and docstring, which keeps
+ # error messages and debugging output readable.
+ @functools.wraps(func)
+ def wrapper(self, *args, **kwargs) -> Any:
+ try:
+ return func(self, *args, **kwargs)
+ except Exception as e:
+ self._status = DeviceStatus.ERROR
+ raise EosDeviceError(f"Error in {func.__name__} in device {self._device_id}") from e
+
+ return wrapper
+
+
+class DeviceMeta(ABCMeta):
+ def __new__(cls, name: str, bases: tuple, dct: dict):
+ cls._add_exception_capture_to_child_methods(bases, dct)
+ return super().__new__(cls, name, bases, dct)
+
+ @staticmethod
+ def _add_exception_capture_to_child_methods(bases: tuple, dct: dict) -> None:
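+ """
+ Wrap every public method defined directly on the subclass (inherited methods
+ are skipped) so that an exception marks the device as ERROR and is re-raised
+ as an EosDeviceError.
+ """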
+ base_methods = set()
+ for base in bases:
+ if isinstance(base, DeviceMeta):
+ base_methods.update(base.__dict__.keys())
+
+ for attr, value in dct.items():
+ if callable(value) and not attr.startswith("__") and attr not in base_methods:
+ dct[attr] = capture_exceptions(value)
+
+
+class BaseDevice(ABC, metaclass=DeviceMeta):
+ """
+ The base class for all devices in EOS.
+ """
+
+ def __init__(
+ self,
+ device_id: str,
+ lab_id: str,
+ device_type: str,
+ initialization_parameters: dict[str, Any],
+ ):
+ self._device_id = device_id
+ self._lab_id = lab_id
+ self._device_type = device_type
+ self._status = DeviceStatus.DISABLED
+ self._initialization_parameters = initialization_parameters
+
+ self._lock = threading.Lock()
+
+ self.initialize(initialization_parameters)
+
+ def __del__(self):
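+ # __del__ can run on a partially-constructed instance if __init__ raised,
+ # so bail out before _status exists.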
+ if "_status" not in self.__dict__:
+ return
+ if self._status and self._status != DeviceStatus.DISABLED:
+ self.cleanup()
+
+ def initialize(self, initialization_parameters: dict[str, Any]) -> None:
+ """
+ Initialize the device. After calling this method, the device is ready to be used for tasks
+ and the status is IDLE.
+ """
+ with self._lock:
+ if self._status != DeviceStatus.DISABLED:
+ raise EosDeviceInitializationError(f"Device {self._device_id} is already initialized.")
+
+ try:
+ self._initialize(initialization_parameters)
+ self._status = DeviceStatus.IDLE
+ except Exception as e:
+ self._status = DeviceStatus.ERROR
+ raise EosDeviceInitializationError(
+ f"Error initializing device {self._device_id}: {e!s}",
+ ) from e
+
+ def cleanup(self) -> None:
+ """
+ Clean up the device. After calling this method, the device can no longer be used for tasks and the status is
+ DISABLED.
+ """
+ with self._lock:
+ if self._status == DeviceStatus.BUSY:
+ raise EosDeviceCleanupError(
+ f"Device {self._device_id} is busy. Cannot perform cleanup.",
+ )
+
+ try:
+ self._cleanup()
+ self._status = DeviceStatus.DISABLED
+ except Exception as e:
+ self._status = DeviceStatus.ERROR
+ raise EosDeviceCleanupError(f"Error cleaning up device {self._device_id}: {e!s}") from e
+
+ def enable(self) -> None:
+ """
+ Enable the device. The status should be IDLE after calling this method.
+ """
+ with self._lock:
+ if self._status == DeviceStatus.DISABLED:
+ self.initialize(self._initialization_parameters)
+
+ def disable(self) -> None:
+ """
+ Disable the device. The status should be DISABLED after calling this method.
+ """
+ with self._lock:
+ if self._status != DeviceStatus.DISABLED:
+ self.cleanup()
+
+ def report(self) -> dict[str, Any]:
+ """
+ Return a dictionary with any member variables needed for logging purposes and progress tracking.
+ """
+ return self._report()
+
+ def report_status(self) -> dict[str, Any]:
+ """
+ Return a dictionary with the ID and status of the device.
+ """
+ return {
+ "id": self._device_id,
+ "status": self._status,
+ }
+
+ def get_id(self) -> str:
+ return self._device_id
+
+ def get_type(self) -> str:
+ return self._device_type
+
+ def get_status(self) -> str:
+ return self._status
+
+ @abstractmethod
+ def _initialize(self, initialization_parameters: dict[str, Any]) -> None:
+ """
+ Implementation for the initialization of the device.
+ """
+
+ @abstractmethod
+ def _cleanup(self) -> None:
+ """
+ Implementation for the cleanup of the device.
+ """
+
+ @abstractmethod
+ def _report(self) -> dict[str, Any]:
+ """
+ Implementation for the report method.
+ """
diff --git a/eos/devices/device_actor_references.py b/eos/devices/device_actor_references.py
new file mode 100644
index 0000000..67156fa
--- /dev/null
+++ b/eos/devices/device_actor_references.py
@@ -0,0 +1,57 @@
+from dataclasses import dataclass
+
+from ray.actor import ActorHandle
+
+from eos.utils.ray_utils import RayActorWrapper
+
+
+@dataclass(frozen=True)
+class DeviceRayActorReference:
+ id: str
+ lab_id: str
+ type: str
+ actor_handle: ActorHandle
+
+
+@dataclass(frozen=True)
+class DeviceRayActorWrapperReference:
+ id: str
+ lab_id: str
+ type: str
+ ray_actor_wrapper: RayActorWrapper
+
+
+class DeviceRayActorWrapperReferences:
+ def __init__(self, devices: list[DeviceRayActorReference]):
+ self._devices_by_lab_and_id: dict[tuple[str, str], DeviceRayActorWrapperReference] = {}
+ self._devices_by_lab_id: dict[str, list[DeviceRayActorWrapperReference]] = {}
+ self._devices_by_type: dict[str, list[DeviceRayActorWrapperReference]] = {}
+
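+ # Index each device three ways for constant-time lookups: by (lab, id),
+ # by lab, and by device type.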
+ for device in devices:
+ device_actor_wrapper_reference = DeviceRayActorWrapperReference(
+ id=device.id,
+ lab_id=device.lab_id,
+ type=device.type,
+ ray_actor_wrapper=RayActorWrapper(device.actor_handle),
+ )
+ self._devices_by_lab_and_id[(device.lab_id, device.id)] = device_actor_wrapper_reference
+
+ if device.lab_id not in self._devices_by_lab_id:
+ self._devices_by_lab_id[device.lab_id] = []
+ self._devices_by_lab_id[device.lab_id].append(device_actor_wrapper_reference)
+
+ if device.type not in self._devices_by_type:
+ self._devices_by_type[device.type] = []
+ self._devices_by_type[device.type].append(device_actor_wrapper_reference)
+
+ def get(self, lab_id: str, device_id: str) -> RayActorWrapper | None:
+ device = self._devices_by_lab_and_id.get((lab_id, device_id))
+ return device.ray_actor_wrapper if device else None
+
+ def get_all_by_lab_id(self, lab_id: str) -> list[RayActorWrapper]:
+ devices = self._devices_by_lab_id.get(lab_id, [])
+ return [device.ray_actor_wrapper for device in devices]
+
+ def get_all_by_type(self, device_type: str) -> list[RayActorWrapper]:
+ devices = self._devices_by_type.get(device_type, [])
+ return [device.ray_actor_wrapper for device in devices]
diff --git a/eos/devices/device_manager.py b/eos/devices/device_manager.py
new file mode 100644
index 0000000..2d0f717
--- /dev/null
+++ b/eos/devices/device_manager.py
@@ -0,0 +1,198 @@
+from typing import Any
+
+import ray
+from omegaconf import OmegaConf
+from ray.actor import ActorHandle
+
+from eos.configuration.configuration_manager import ConfigurationManager
+from eos.configuration.constants import EOS_COMPUTER_NAME
+from eos.configuration.plugin_registries.device_plugin_registry import DevicePluginRegistry
+from eos.devices.entities.device import Device, DeviceStatus
+from eos.devices.exceptions import EosDeviceStateError, EosDeviceInitializationError
+from eos.logging.batch_error_logger import batch_error, raise_batched_errors
+from eos.logging.logger import log
+from eos.persistence.db_manager import DbManager
+from eos.persistence.mongo_repository import MongoRepository
+
+
+class DeviceManager:
+ """
+ Provides methods for interacting with the devices in a lab.
+ """
+
+ def __init__(self, configuration_manager: ConfigurationManager, db_manager: DbManager):
+ self._configuration_manager = configuration_manager
+
+ self._devices = MongoRepository("devices", db_manager)
+ self._devices.create_indices([("lab_id", 1), ("id", 1)], unique=True)
+
+ self._device_plugin_registry = DevicePluginRegistry()
+ self._device_actor_handles: dict[str, ActorHandle] = {}
+ self._device_actor_computer_ips: dict[str, str] = {}
+
+ log.debug("Device manager initialized.")
+
+ def get_device(self, lab_id: str, device_id: str) -> Device | None:
+ """
+ Get a device by its ID.
+ """
+ device = self._devices.get_one(lab_id=lab_id, id=device_id)
+ if not device:
+ return None
+ return Device(**device)
+
+ def get_devices(self, **query: Any) -> list[Device]:
+ """
+ Query devices with arbitrary parameters.
+
+ :param query: Dictionary of query parameters.
+ """
+ devices = self._devices.get_all(**query)
+ return [Device(**device) for device in devices]
+
+ def set_device_status(self, lab_id: str, device_id: str, status: DeviceStatus) -> None:
+ """
+ Set the status of a device.
+ """
+ if not self._devices.exists(lab_id=lab_id, id=device_id):
+ raise EosDeviceStateError(f"Device '{device_id}' in lab '{lab_id}' does not exist.")
+
+ self._devices.update({"status": status.value}, lab_id=lab_id, id=device_id)
+
+ def get_device_actor(self, lab_id: str, device_id: str) -> ActorHandle:
+ """
+ Get the actor handle of a device.
+ """
+ actor_id = f"{lab_id}.{device_id}"
+ if actor_id not in self._device_actor_handles:
+ raise EosDeviceInitializationError(f"Device actor '{actor_id}' does not exist.")
+
+ return self._device_actor_handles.get(actor_id)
+
+ def update_devices(self, loaded_labs: set[str] | None = None, unloaded_labs: set[str] | None = None) -> None:
+ if unloaded_labs:
+ for lab_id in unloaded_labs:
+ self._remove_devices_for_lab(lab_id)
+
+ if loaded_labs:
+ for lab_id in loaded_labs:
+ self._create_devices_for_lab(lab_id)
+
+ self._check_device_actors_healthy()
+ log.debug("Devices have been updated.")
+
+ def cleanup_device_actors(self) -> None:
+ for actor in self._device_actor_handles.values():
+ ray.kill(actor)
+ self._device_actor_handles.clear()
+ self._device_actor_computer_ips.clear()
+ self._devices.delete()
+ log.info("All device actors have been cleaned up.")
+
+ def _remove_devices_for_lab(self, lab_id: str) -> None:
+ devices_to_remove = self.get_devices(lab_id=lab_id)
+ for device in devices_to_remove:
+ actor_id = device.get_actor_id()
+ if actor_id in self._device_actor_handles:
+ ray.kill(self._device_actor_handles[actor_id])
+ del self._device_actor_handles[actor_id]
+ del self._device_actor_computer_ips[actor_id]
+ self._devices.delete(lab_id=lab_id)
+ log.debug(f"Removed devices for lab '{lab_id}'")
+
+ def _create_devices_for_lab(self, lab_id: str) -> None:
+ lab_config = self._configuration_manager.labs[lab_id]
+ for device_id, device_config in lab_config.devices.items():
+ device = self.get_device(lab_id, device_id)
+
+ if device and device.get_actor_id() in self._device_actor_handles:
+ continue
+
+ if device and device.actor_handle:
+ self._restore_device_actor(device)
+ else:
+ device = Device(
+ lab_id=lab_id,
+ id=device_id,
+ type=device_config.type,
+ location=device_config.location,
+ computer=device_config.computer,
+ )
+ self._devices.update(device.model_dump(), lab_id=lab_id, id=device_id)
+ self._create_device_actor(device)
+
+ log.debug(f"Created devices for lab '{lab_id}'")
+
+ def _restore_device_actor(self, device: Device) -> None:
+ """
+ Restore a device actor registered in the database by looking up its actor in the Ray cluster.
+ """
+ device_actor_id = device.get_actor_id()
+ device_config = self._configuration_manager.labs[device.lab_id].devices[device.id]
+ self._device_actor_handles[device_actor_id] = ray.get_actor(device_actor_id)
+ self._device_actor_computer_ips[device_actor_id] = (
+ self._configuration_manager.labs[device.lab_id].computers[device_config.computer].ip
+ )
+ log.debug(f"Restored device actor {device_actor_id}")
+
+ def _create_device_actor(self, device: Device) -> None:
+ lab_config = self._configuration_manager.labs[device.lab_id]
+ device_config = lab_config.devices[device.id]
+ computer_name = device_config.computer.lower()
+
+ computer_ip = "127.0.0.1" if computer_name == EOS_COMPUTER_NAME else lab_config.computers[computer_name].ip
+
+ device_actor_id = device.get_actor_id()
+ self._device_actor_computer_ips[device_actor_id] = computer_ip
+
+ spec_initialization_parameters = (
+ self._configuration_manager.device_specs.get_spec_by_type(device.type).initialization_parameters or {}
+ )
+ if spec_initialization_parameters:
+ spec_initialization_parameters = OmegaConf.to_object(spec_initialization_parameters)
+
+ device_config_initialization_parameters = device_config.initialization_parameters or {}
+ if device_config_initialization_parameters:
+ device_config_initialization_parameters = OmegaConf.to_object(device_config_initialization_parameters)
+
+ initialization_parameters: dict[str, Any] = {
+ **spec_initialization_parameters,
+ **device_config_initialization_parameters,
+ }
+
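+ # Pin the actor to the target machine: the shared "eos-core" resource keeps it
+ # on the local EOS computer, while Ray's per-node "node:<ip>" custom resource
+ # places it on the remote computer with that IP.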
+ resources = (
+ {"eos-core": 0.0001} if computer_ip in ["localhost", "127.0.0.1"] else {f"node:{computer_ip}": 0.0001}
+ )
+
+ device_class = ray.remote(self._device_plugin_registry.get_device_class_type(device.type))
+ self._device_actor_handles[device_actor_id] = device_class.options(
+ name=device_actor_id,
+ num_cpus=0,
+ resources=resources,
+ ).remote(device.id, device.lab_id, device.type, initialization_parameters)
+
+ def _check_device_actors_healthy(self) -> None:
+ status_reports = [actor_handle.report_status.remote() for actor_handle in self._device_actor_handles.values()]
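+ # Map each status-report ObjectRef back to its device actor ID so that actors
+ # whose reports are still pending after the ray.wait() timeout can be identified.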
+ status_report_to_device_actor_id = {
+ status_report: device_actor_id
+ for device_actor_id, status_report in zip(self._device_actor_handles.keys(), status_reports, strict=False)
+ }
+
+ ready_status_reports, not_ready_status_reports = ray.wait(
+ status_reports,
+ num_returns=len(self._device_actor_handles),
+ timeout=5,
+ )
+
+ for not_ready_ref in not_ready_status_reports:
+ device_actor_id = status_report_to_device_actor_id[not_ready_ref]
+ actor_handle = self._device_actor_handles[device_actor_id]
+ computer_ip = self._device_actor_computer_ips[device_actor_id]
+
+ ray.kill(actor_handle)
+
+ batch_error(
+ f"Device actor '{device_actor_id}' could not be reached on the computer {computer_ip}",
+ EosDeviceInitializationError,
+ )
+ raise_batched_errors(EosDeviceInitializationError)
diff --git a/eos/devices/entities/__init__.py b/eos/devices/entities/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/eos/devices/entities/device.py b/eos/devices/entities/device.py
new file mode 100644
index 0000000..409da9c
--- /dev/null
+++ b/eos/devices/entities/device.py
@@ -0,0 +1,32 @@
+from enum import Enum
+from typing import Any
+
+from pydantic import BaseModel, field_serializer, Field
+from ray.actor import ActorHandle
+
+
+class DeviceStatus(Enum):
+ ACTIVE = "ACTIVE"
+ INACTIVE = "INACTIVE"
+
+
+class Device(BaseModel):
+ id: str
+ lab_id: str
+ type: str
+ computer: str
+ location: str | None = None
+ status: DeviceStatus = DeviceStatus.ACTIVE
+ metadata: dict[str, Any] = {}
+
+ actor_handle: ActorHandle | None = Field(exclude=True, default=None)
+
+ class Config:
+ arbitrary_types_allowed = True
+
+ def get_actor_id(self) -> str:
+ return f"{self.lab_id}.{self.id}"
+
+ @field_serializer("status")
+ def status_enum_to_string(self, v: DeviceStatus) -> str:
+ return v.value
diff --git a/eos/devices/exceptions.py b/eos/devices/exceptions.py
new file mode 100644
index 0000000..8ccff45
--- /dev/null
+++ b/eos/devices/exceptions.py
@@ -0,0 +1,18 @@
+class EosDeviceError(Exception):
+ pass
+
+
+class EosDeviceStateError(EosDeviceError):
+ pass
+
+
+class EosDeviceClassNotFoundError(EosDeviceError):
+ pass
+
+
+class EosDeviceInitializationError(EosDeviceError):
+ pass
+
+
+class EosDeviceCleanupError(EosDeviceError):
+ pass
diff --git a/eos/eos.py b/eos/eos.py
new file mode 100755
index 0000000..280be90
--- /dev/null
+++ b/eos/eos.py
@@ -0,0 +1,15 @@
+#!/usr/bin/env python3
+
+import typer
+
+from eos.cli.orchestrator_cli import start_orchestrator
+from eos.cli.pkg_cli import pkg_app
+from eos.cli.web_api_cli import start_web_api
+
+eos_app = typer.Typer(pretty_exceptions_show_locals=False)
+eos_app.command(name="orchestrator", help="Start the EOS orchestrator")(start_orchestrator)
+eos_app.command(name="api", help="Start the EOS web API")(start_web_api)
+eos_app.add_typer(pkg_app, name="pkg", help="Manage EOS packages")
+
+if __name__ == "__main__":
+ eos_app()
diff --git a/eos/experiments/__init__.py b/eos/experiments/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/eos/experiments/entities/__init__.py b/eos/experiments/entities/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/eos/experiments/entities/experiment.py b/eos/experiments/entities/experiment.py
new file mode 100644
index 0000000..a73f800
--- /dev/null
+++ b/eos/experiments/entities/experiment.py
@@ -0,0 +1,48 @@
+from datetime import datetime, timezone
+from enum import Enum
+from typing import Any
+
+from pydantic import BaseModel, Field, field_serializer
+
+
+class ExperimentStatus(Enum):
+ CREATED = "CREATED"
+ RUNNING = "RUNNING"
+ COMPLETED = "COMPLETED"
+ SUSPENDED = "SUSPENDED"
+ CANCELLED = "CANCELLED"
+ FAILED = "FAILED"
+
+
+class ExperimentExecutionParameters(BaseModel):
+ resume: bool = False
+
+
+class Experiment(BaseModel):
+ id: str
+ type: str
+
+ execution_parameters: ExperimentExecutionParameters
+
+ status: ExperimentStatus = ExperimentStatus.CREATED
+
+ labs: list[str] = []
+
+ running_tasks: list[str] = []
+ completed_tasks: list[str] = []
+
+ dynamic_parameters: dict[str, dict[str, Any]] = {}
+
+ metadata: dict[str, Any] = {}
+
+ start_time: datetime | None = None
+ end_time: datetime | None = None
+
+ # Use a default_factory so each instance gets its own creation timestamp
+ # (a plain default would be evaluated once at class-definition time).
+ created_at: datetime = Field(default_factory=lambda: datetime.now(tz=timezone.utc))
+
+ class Config:
+ arbitrary_types_allowed = True
+
+ @field_serializer("status")
+ def status_enum_to_string(self, v: ExperimentStatus) -> str:
+ return v.value
diff --git a/eos/experiments/exceptions.py b/eos/experiments/exceptions.py
new file mode 100644
index 0000000..ae4890f
--- /dev/null
+++ b/eos/experiments/exceptions.py
@@ -0,0 +1,18 @@
+class EosExperimentError(Exception):
+ pass
+
+
+class EosExperimentStateError(EosExperimentError):
+ pass
+
+
+class EosExperimentTaskExecutionError(EosExperimentError):
+ pass
+
+
+class EosExperimentExecutionError(EosExperimentError):
+ pass
+
+
+class EosExperimentCancellationError(EosExperimentError):
+ pass
diff --git a/eos/experiments/experiment_executor.py b/eos/experiments/experiment_executor.py
new file mode 100644
index 0000000..2c33050
--- /dev/null
+++ b/eos/experiments/experiment_executor.py
@@ -0,0 +1,304 @@
+import asyncio
+from typing import Any
+
+from eos.configuration.experiment_graph.experiment_graph import ExperimentGraph
+from eos.configuration.validation import validation_utils
+from eos.containers.container_manager import ContainerManager
+from eos.experiments.entities.experiment import ExperimentStatus, ExperimentExecutionParameters, Experiment
+from eos.experiments.exceptions import (
+ EosExperimentExecutionError,
+ EosExperimentTaskExecutionError,
+ EosExperimentCancellationError,
+)
+from eos.experiments.experiment_manager import ExperimentManager
+from eos.logging.logger import log
+from eos.scheduling.abstract_scheduler import AbstractScheduler
+from eos.scheduling.entities.scheduled_task import ScheduledTask
+from eos.tasks.entities.task import TaskOutput
+from eos.tasks.entities.task_execution_parameters import TaskExecutionParameters
+from eos.tasks.exceptions import EosTaskExecutionError
+from eos.tasks.task_executor import TaskExecutor
+from eos.tasks.task_input_resolver import TaskInputResolver
+from eos.tasks.task_manager import TaskManager
+
+
+class ExperimentExecutor:
+ """Responsible for executing all the tasks of a single experiment."""
+
+ def __init__(
+ self,
+ experiment_id: str,
+ experiment_type: str,
+ execution_parameters: ExperimentExecutionParameters,
+ experiment_graph: ExperimentGraph,
+ experiment_manager: ExperimentManager,
+ task_manager: TaskManager,
+ container_manager: ContainerManager,
+ task_executor: TaskExecutor,
+ scheduler: AbstractScheduler,
+ ):
+ self._experiment_id = experiment_id
+ self._experiment_type = experiment_type
+ self._execution_parameters = execution_parameters
+ self._experiment_graph = experiment_graph
+ self._experiment_manager = experiment_manager
+ self._task_manager = task_manager
+ self._container_manager = container_manager
+ self._task_executor = task_executor
+ self._scheduler = scheduler
+ self._task_input_resolver = TaskInputResolver(task_manager, experiment_manager)
+
+ self._current_task_execution_parameters: dict[str, TaskExecutionParameters] = {}
+ self._task_output_futures: dict[str, asyncio.Task] = {}
+ self._experiment_status: ExperimentStatus | None = None
+
+ def start_experiment(
+ self,
+ dynamic_parameters: dict[str, dict[str, Any]] | None = None,
+ metadata: dict[str, Any] | None = None,
+ ) -> None:
+ """
+ Start the experiment and register the executor with the scheduler.
+ """
+ experiment = self._experiment_manager.get_experiment(self._experiment_id)
+ if experiment:
+ self._handle_existing_experiment(experiment)
+ else:
+ self._create_new_experiment(dynamic_parameters, metadata)
+
+ self._scheduler.register_experiment(
+ experiment_id=self._experiment_id,
+ experiment_type=self._experiment_type,
+ experiment_graph=self._experiment_graph,
+ )
+
+ self._experiment_manager.start_experiment(self._experiment_id)
+ self._experiment_status = ExperimentStatus.RUNNING
+
+ log.info(f"{'Resumed' if self._execution_parameters.resume else 'Started'} experiment '{self._experiment_id}'.")
+
+ def _handle_existing_experiment(self, experiment: Experiment) -> None:
+ """
+ Handle cases when the experiment already exists.
+ """
+ self._experiment_status = experiment.status
+
+ if not self._execution_parameters.resume:
+ def _raise_error(status: str) -> None:
+ raise EosExperimentExecutionError(
+ f"Cannot start experiment '{self._experiment_id}' as it already exists and is '{status}'. "
+ f"Please create a new experiment or re-submit with 'resume=True'."
+ )
+
+ status_handlers = {
+ ExperimentStatus.COMPLETED: lambda: _raise_error("completed"),
+ ExperimentStatus.SUSPENDED: lambda: _raise_error("suspended"),
+ ExperimentStatus.CANCELLED: lambda: _raise_error("cancelled"),
+ ExperimentStatus.FAILED: lambda: _raise_error("failed"),
+ }
+ status_handlers.get(self._experiment_status, lambda: None)()
+
+ self._resume_experiment()
+
+ async def cancel_experiment(self) -> None:
+ """
+ Cancel the experiment.
+ """
+ experiment = self._experiment_manager.get_experiment(self._experiment_id)
+ if not experiment or experiment.status != ExperimentStatus.RUNNING:
+ status = experiment.status.value if experiment else "nonexistent"
+ raise EosExperimentCancellationError(
+ f"Cannot cancel experiment '{self._experiment_id}' with status '{status}'. It must be running."
+ )
+
+ log.warning(f"Cancelling experiment '{self._experiment_id}'...")
+ self._experiment_status = ExperimentStatus.CANCELLED
+ self._experiment_manager.cancel_experiment(self._experiment_id)
+ self._scheduler.unregister_experiment(self._experiment_id)
+ await self._cancel_running_tasks()
+
+ log.warning(f"Cancelled experiment '{self._experiment_id}'.")
+
+ async def progress_experiment(self) -> bool:
+ """
+ Try to progress the experiment by executing tasks.
+
+ :return: True if the experiment has completed or been cancelled, False otherwise.
+ """
+ try:
+ if self._experiment_status != ExperimentStatus.RUNNING:
+ return self._experiment_status == ExperimentStatus.CANCELLED
+
+ if self._scheduler.is_experiment_completed(self._experiment_id):
+ self._complete_experiment()
+ return True
+
+ self._process_completed_tasks()
+ await self._execute_tasks()
+
+ return False
+ except Exception as e:
+ self._fail_experiment()
+ raise EosExperimentExecutionError(f"Error executing experiment '{self._experiment_id}'") from e
+
+ def _resume_experiment(self) -> None:
+ """
+ Resume an existing experiment.
+ """
+ self._experiment_manager.delete_non_completed_tasks(self._experiment_id)
+ log.info(f"Experiment '{self._experiment_id}' resumed.")
+
+ def _create_new_experiment(
+ self, dynamic_parameters: dict[str, dict[str, Any]] | None, metadata: dict[str, Any] | None
+ ) -> None:
+ """
+ Create a new experiment with the given parameters.
+ """
+ dynamic_parameters = dynamic_parameters or {}
+ self._validate_dynamic_parameters(dynamic_parameters)
+ self._experiment_manager.create_experiment(
+ experiment_id=self._experiment_id,
+ experiment_type=self._experiment_type,
+ execution_parameters=self._execution_parameters,
+ dynamic_parameters=dynamic_parameters,
+ metadata=metadata,
+ )
+
+ async def _cancel_running_tasks(self) -> None:
+ """
+ Cancel all running tasks in the experiment.
+ """
+ cancellation_futures = [
+ self._task_executor.request_task_cancellation(params.experiment_id, params.task_config.id)
+ for params in self._current_task_execution_parameters.values()
+ ]
+ try:
+ await asyncio.wait_for(asyncio.gather(*cancellation_futures), timeout=30)
+ except asyncio.TimeoutError as e:
+ raise EosExperimentExecutionError(
+ f"Timeout while cancelling experiment {self._experiment_id}. Some tasks may not have been cancelled."
+ ) from e
+
+ def _complete_experiment(self) -> None:
+ """
+ Complete the experiment and clean up.
+ """
+ self._scheduler.unregister_experiment(self._experiment_id)
+ self._experiment_manager.complete_experiment(self._experiment_id)
+ self._experiment_status = ExperimentStatus.COMPLETED
+
+ def _fail_experiment(self) -> None:
+ """
+ Fail the experiment.
+ """
+ self._scheduler.unregister_experiment(self._experiment_id)
+ self._experiment_manager.fail_experiment(self._experiment_id)
+ self._experiment_status = ExperimentStatus.FAILED
+
+ def _process_completed_tasks(self) -> None:
+ """
+ Process the output of completed tasks.
+ """
+ completed_tasks = [task_id for task_id, future in self._task_output_futures.items() if future.done()]
+ for task_id in completed_tasks:
+ self._process_task_output(task_id)
+
+ def _process_task_output(self, task_id: str) -> None:
+ """
+ Process the output of a single completed task.
+ """
+ try:
+ result = self._task_output_futures[task_id].result()
+ if result:
+ output_parameters, output_containers, output_files = result
+ self._update_containers(output_containers)
+ self._add_task_output(task_id, output_parameters, output_containers, output_files)
+ self._task_manager.complete_task(self._experiment_id, task_id)
+ log.info(f"EXP '{self._experiment_id}' - Completed task '{task_id}'.")
+ except EosTaskExecutionError as e:
+ raise EosExperimentTaskExecutionError(
+ f"Error executing task '{task_id}' of experiment '{self._experiment_id}'"
+ ) from e
+ finally:
+ del self._task_output_futures[task_id]
+ del self._current_task_execution_parameters[task_id]
+
+ def _update_containers(self, output_containers: dict[str, Any]) -> None:
+ """
+ Update containers with task output.
+ """
+ for container in output_containers.values():
+ self._container_manager.update_container(container)
+
+ def _add_task_output(
+ self,
+ task_id: str,
+ output_parameters: dict[str, Any],
+ output_containers: dict[str, Any],
+ output_files: dict[str, Any],
+ ) -> None:
+ """
+ Add task output to the task manager.
+ """
+ task_output = TaskOutput(
+ experiment_id=self._experiment_id,
+ task_id=task_id,
+ parameters=output_parameters,
+ containers=output_containers,
+ file_names=list(output_files.keys()),
+ )
+ for file_name, file_data in output_files.items():
+ self._task_manager.add_task_output_file(self._experiment_id, task_id, file_name, file_data)
+ self._task_manager.add_task_output(self._experiment_id, task_id, task_output)
+
+ async def _execute_tasks(self) -> None:
+ """
+ Request and execute new tasks from the scheduler.
+ """
+ new_scheduled_tasks = await self._scheduler.request_tasks(self._experiment_id)
+ for scheduled_task in new_scheduled_tasks:
+ if scheduled_task.id not in self._current_task_execution_parameters:
+ await self._execute_task(scheduled_task)
+
+ async def _execute_task(self, scheduled_task: ScheduledTask) -> None:
+ """
+ Execute a single task.
+ """
+ task_config = self._experiment_graph.get_task_config(scheduled_task.id)
+ task_config = self._task_input_resolver.resolve_task_inputs(self._experiment_id, task_config)
+ task_execution_parameters = TaskExecutionParameters(
+ task_id=scheduled_task.id,
+ experiment_id=self._experiment_id,
+ devices=scheduled_task.devices,
+ task_config=task_config,
+ )
+ self._task_output_futures[scheduled_task.id] = asyncio.create_task(
+ self._task_executor.request_task_execution(task_execution_parameters, scheduled_task)
+ )
+ self._current_task_execution_parameters[scheduled_task.id] = task_execution_parameters
+
+ def _validate_dynamic_parameters(self, dynamic_parameters: dict[str, dict[str, Any]]) -> None:
+ """
+ Validate that all required dynamic parameters are provided and there are no surplus parameters.
+ """
+ required_params = self._get_required_dynamic_parameters()
+ provided_params = {
+ f"{task_id}.{param_name}" for task_id, params in dynamic_parameters.items() for param_name in params
+ }
+
+ missing_params = required_params - provided_params
+ unexpected_params = provided_params - required_params
+
+ if missing_params:
+ raise EosExperimentExecutionError(f"Missing values for dynamic parameters: {missing_params}")
+ if unexpected_params:
+ raise EosExperimentExecutionError(f"Unexpected dynamic parameters provided: {unexpected_params}")
+
+ def _get_required_dynamic_parameters(self) -> set[str]:
+ """
+ Get a set of all required dynamic parameters in the experiment graph.
+ """
+ return {
+ f"{task_id}.{param_name}"
+ for task_id in self._experiment_graph.get_tasks()
+ for param_name, param_value in self._experiment_graph.get_task_config(task_id).parameters.items()
+ if validation_utils.is_dynamic_parameter(param_value)
+ }
diff --git a/eos/experiments/experiment_executor_factory.py b/eos/experiments/experiment_executor_factory.py
new file mode 100644
index 0000000..c2f3f37
--- /dev/null
+++ b/eos/experiments/experiment_executor_factory.py
@@ -0,0 +1,49 @@
+from eos.configuration.configuration_manager import ConfigurationManager
+from eos.configuration.experiment_graph.experiment_graph import ExperimentGraph
+from eos.containers.container_manager import ContainerManager
+from eos.experiments.entities.experiment import ExperimentExecutionParameters
+from eos.experiments.exceptions import EosExperimentExecutionError
+from eos.experiments.experiment_executor import ExperimentExecutor
+from eos.experiments.experiment_manager import ExperimentManager
+from eos.scheduling.abstract_scheduler import AbstractScheduler
+from eos.tasks.task_executor import TaskExecutor
+from eos.tasks.task_manager import TaskManager
+
+
+class ExperimentExecutorFactory:
+ """
+ Factory class to create ExperimentExecutor instances.
+ """
+
+ def __init__(
+ self,
+ configuration_manager: ConfigurationManager,
+ experiment_manager: ExperimentManager,
+ task_manager: TaskManager,
+ container_manager: ContainerManager,
+ task_executor: TaskExecutor,
+ scheduler: AbstractScheduler,
+ ):
+ self._configuration_manager = configuration_manager
+ self._experiment_manager = experiment_manager
+ self._task_manager = task_manager
+ self._container_manager = container_manager
+ self._task_executor = task_executor
+ self._scheduler = scheduler
+
+ def create(
+ self, experiment_id: str, experiment_type: str, execution_parameters: ExperimentExecutionParameters
+ ) -> ExperimentExecutor:
+ experiment_config = self._configuration_manager.experiments.get(experiment_type)
+ if experiment_config is None:
+ raise EosExperimentExecutionError(f"Experiment type '{experiment_type}' is not loaded.")
+ experiment_graph = ExperimentGraph(experiment_config)
+
+ return ExperimentExecutor(
+ experiment_id=experiment_id,
+ experiment_type=experiment_type,
+ execution_parameters=execution_parameters,
+ experiment_graph=experiment_graph,
+ experiment_manager=self._experiment_manager,
+ task_manager=self._task_manager,
+ container_manager=self._container_manager,
+ task_executor=self._task_executor,
+ scheduler=self._scheduler,
+ )
diff --git a/eos/experiments/experiment_manager.py b/eos/experiments/experiment_manager.py
new file mode 100644
index 0000000..8e2b5d1
--- /dev/null
+++ b/eos/experiments/experiment_manager.py
@@ -0,0 +1,174 @@
+from datetime import datetime, timezone
+from typing import Any
+
+from eos.configuration.configuration_manager import ConfigurationManager
+from eos.experiments.entities.experiment import Experiment, ExperimentStatus, ExperimentExecutionParameters
+from eos.experiments.exceptions import EosExperimentStateError
+from eos.experiments.repositories.experiment_repository import ExperimentRepository
+from eos.logging.logger import log
+from eos.persistence.db_manager import DbManager
+from eos.tasks.entities.task import TaskStatus
+from eos.tasks.repositories.task_repository import TaskRepository
+
+
+class ExperimentManager:
+ """
+ Responsible for managing the state of all experiments in EOS and tracking their execution.
+ """
+
+ def __init__(self, configuration_manager: ConfigurationManager, db_manager: DbManager):
+ self._configuration_manager = configuration_manager
+ self._experiments = ExperimentRepository("experiments", db_manager)
+ self._experiments.create_indices([("id", 1)], unique=True)
+ self._tasks = TaskRepository("tasks", db_manager)
+
+ log.debug("Experiment manager initialized.")
+
+ def create_experiment(
+ self,
+ experiment_id: str,
+ experiment_type: str,
+ execution_parameters: ExperimentExecutionParameters | None = None,
+ dynamic_parameters: dict[str, dict[str, Any]] | None = None,
+ metadata: dict[str, Any] | None = None,
+ ) -> None:
+ """
+ Create a new experiment of a given type with a unique id.
+
+ :param experiment_id: A unique id for the experiment.
+ :param experiment_type: The type of the experiment as defined in the configuration.
+ :param dynamic_parameters: Dictionary of the dynamic parameters per task and their provided values.
+ :param execution_parameters: Parameters for the execution of the experiment.
+ :param metadata: Additional metadata to be stored with the experiment.
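+
+ Example (sketch; the experiment type and parameter names are illustrative):
+
+ experiment_manager.create_experiment(
+ experiment_id="water_purification_1",
+ experiment_type="water_purification",
+ dynamic_parameters={"mixing": {"time": 120}},
+ )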
+ """
+ if self._experiments.get_one(id=experiment_id):
+ raise EosExperimentStateError(f"Experiment '{experiment_id}' already exists.")
+
+ experiment_config = self._configuration_manager.experiments.get(experiment_type)
+ if not experiment_config:
+ raise EosExperimentStateError(f"Experiment type '{experiment_type}' not found in the configuration.")
+
+ labs = experiment_config.labs
+
+ experiment = Experiment(
+ id=experiment_id,
+ type=experiment_type,
+ execution_parameters=execution_parameters or ExperimentExecutionParameters(),
+ labs=labs,
+ dynamic_parameters=dynamic_parameters or {},
+ metadata=metadata or {},
+ )
+ self._experiments.create(experiment.model_dump())
+
+ log.info(f"Created experiment '{experiment_id}'.")
+
+ def delete_experiment(self, experiment_id: str) -> None:
+ """
+ Delete an experiment.
+ """
+ if not self._experiments.exists(id=experiment_id):
+ raise EosExperimentStateError(f"Experiment '{experiment_id}' does not exist.")
+
+ self._experiments.delete(id=experiment_id)
+ self._tasks.delete(experiment_id=experiment_id)
+
+ log.info(f"Deleted experiment '{experiment_id}'.")
+
+ def start_experiment(self, experiment_id: str) -> None:
+ """
+ Start an experiment.
+ """
+ self._set_experiment_status(experiment_id, ExperimentStatus.RUNNING)
+
+ def complete_experiment(self, experiment_id: str) -> None:
+ """
+ Complete an experiment.
+ """
+ self._set_experiment_status(experiment_id, ExperimentStatus.COMPLETED)
+
+ def cancel_experiment(self, experiment_id: str) -> None:
+ """
+ Cancel an experiment.
+ """
+ self._set_experiment_status(experiment_id, ExperimentStatus.CANCELLED)
+
+ def suspend_experiment(self, experiment_id: str) -> None:
+ """
+ Suspend an experiment.
+ """
+ self._set_experiment_status(experiment_id, ExperimentStatus.SUSPENDED)
+
+ def fail_experiment(self, experiment_id: str) -> None:
+ """
+ Fail an experiment.
+ """
+ self._set_experiment_status(experiment_id, ExperimentStatus.FAILED)
+
+ def get_experiment(self, experiment_id: str) -> Experiment | None:
+ """
+ Get an experiment.
+ """
+ experiment = self._experiments.get_one(id=experiment_id)
+ return Experiment(**experiment) if experiment else None
+
+ def get_experiments(self, **query: Any) -> list[Experiment]:
+ """
+ Get experiments with a custom query.
+
+ :param query: Dictionary of query parameters.
+ """
+ experiments = self._experiments.get_all(**query)
+ return [Experiment(**experiment) for experiment in experiments]
+
+ def get_lab_experiments(self, lab: str) -> list[Experiment]:
+ """
+ Get all experiments associated with a lab.
+ """
+ experiments = self._experiments.get_experiments_by_lab(lab)
+ return [Experiment(**experiment) for experiment in experiments]
+
+ def get_running_tasks(self, experiment_id: str | None) -> set[str]:
+ """
+ Get the set of currently running tasks of an experiment.
+ """
+ experiment = self._experiments.get_one(id=experiment_id)
+ return set(experiment.get("running_tasks", [])) if experiment else set()
+
+ def get_completed_tasks(self, experiment_id: str) -> set[str]:
+ """
+ Get the set of completed tasks of an experiment.
+ """
+ experiment = self._experiments.get_one(id=experiment_id)
+ return set(experiment.get("completed_tasks", [])) if experiment else set()
+
+ def delete_non_completed_tasks(self, experiment_id: str) -> None:
+ """
+ Delete all tasks that are not completed in the given experiment.
+ """
+ experiment = self.get_experiment(experiment_id)
+ if not experiment:
+ raise EosExperimentStateError(f"Experiment '{experiment_id}' does not exist.")
+
+ for task_id in experiment.running_tasks:
+ self._tasks.delete(experiment_id=experiment_id, id=task_id)
+ self._experiments.clear_running_tasks(experiment_id)
+
+ self._tasks.delete(experiment_id=experiment_id, status=TaskStatus.FAILED.value)
+ self._tasks.delete(experiment_id=experiment_id, status=TaskStatus.CANCELLED.value)
+
+ def _set_experiment_status(self, experiment_id: str, new_status: ExperimentStatus) -> None:
+ """
+ Set the status of an experiment.
+ """
+ if not self._experiments.exists(id=experiment_id):
+ raise EosExperimentStateError(f"Experiment '{experiment_id}' does not exist.")
+
+ update_fields = {"status": new_status.value}
+ if new_status == ExperimentStatus.RUNNING:
+ update_fields["start_time"] = datetime.now(tz=timezone.utc)
+ elif new_status in [
+ ExperimentStatus.COMPLETED,
+ ExperimentStatus.CANCELLED,
+ ExperimentStatus.FAILED,
+ ]:
+ update_fields["end_time"] = datetime.now(tz=timezone.utc)
+
+ self._experiments.update(update_fields, id=experiment_id)
diff --git a/eos/experiments/repositories/__init__.py b/eos/experiments/repositories/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/eos/experiments/repositories/experiment_repository.py b/eos/experiments/repositories/experiment_repository.py
new file mode 100644
index 0000000..d925d52
--- /dev/null
+++ b/eos/experiments/repositories/experiment_repository.py
@@ -0,0 +1,45 @@
+import re
+
+from eos.experiments.entities.experiment import ExperimentStatus
+from eos.persistence.mongo_repository import MongoRepository
+
+
+class ExperimentRepository(MongoRepository):
+ def get_experiments_by_lab(self, lab_type: str) -> list[dict]:
+ return list(self._collection.find({"labs": {"$in": [lab_type]}}))
+
+ def add_running_task(self, experiment_id: str, task_id: str) -> None:
+ self._collection.update_one(
+ {"id": experiment_id},
+ {"$addToSet": {"running_tasks": task_id}},
+ )
+
+ def delete_running_task(self, experiment_id: str, task_id: str) -> None:
+ self._collection.update_one(
+ {"id": experiment_id},
+ {"$pull": {"running_tasks": task_id}},
+ )
+
+ def clear_running_tasks(self, experiment_id: str) -> None:
+ self._collection.update_one(
+ {"id": experiment_id},
+ {"$set": {"running_tasks": []}},
+ )
+
+ def move_task_queue(self, experiment_id: str, task_id: str, source: str, target: str) -> None:
+ self._collection.update_one(
+ {"id": experiment_id},
+ {"$pull": {source: task_id}, "$addToSet": {target: task_id}},
+ )
+
+ def get_experiment_ids_by_campaign(self, campaign_id: str, status: ExperimentStatus | None = None) -> list[str]:
+ """
+ Get all experiment IDs of a campaign with an optional status filter.
+
+ :param campaign_id: The ID of the campaign.
+ :param status: Optional status to filter experiments.
+ :return: A list of experiment IDs.
+ """
+ query = {"id": {"$regex": f"^{re.escape(campaign_id)}"}}
+ if status:
+ query["status"] = status.value
+
+ return [doc["id"] for doc in self._collection.find(query, {"id": 1})]
diff --git a/eos/logging/__init__.py b/eos/logging/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/eos/logging/batch_error_logger.py b/eos/logging/batch_error_logger.py
new file mode 100644
index 0000000..c418520
--- /dev/null
+++ b/eos/logging/batch_error_logger.py
@@ -0,0 +1,30 @@
+class BatchErrorLogger:
+ """
+ The BatchErrorLogger class is used to batch-log errors together. Instead of printing
+ errors as they occur, they are stored in a list and can be printed all at once.
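+
+ Usage sketch with the module-level helpers defined below (the exception type
+ is illustrative):
+
+ batch_error("device A unreachable", EosDeviceInitializationError)
+ batch_error("device B unreachable", EosDeviceInitializationError)
+ raise_batched_errors(EosDeviceInitializationError) # raises one combined error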
+ """
+
+ def __init__(self):
+ self.errors: list[tuple[str, type[Exception]]] = []
+
+ def batch_error(self, message: str, exception_type: type[Exception]) -> None:
+ self.errors.append((message, exception_type))
+
+ def raise_batched_errors(self, root_exception_type: type[Exception] = Exception) -> None:
+ if self.errors:
+ error_messages = "\n\n".join(
+ f"{message} ({exception_type.__name__})" for message, exception_type in self.errors
+ )
+ self.errors.clear()
+ raise root_exception_type(error_messages)
+
+
+def batch_error(message: str, exception_type: type[Exception]) -> None:
+ batch_logger.batch_error(message, exception_type)
+
+
+def raise_batched_errors(root_exception_type: type[Exception] = Exception) -> None:
+ batch_logger.raise_batched_errors(root_exception_type)
+
+
+batch_logger = BatchErrorLogger()
diff --git a/eos/logging/logger.py b/eos/logging/logger.py
new file mode 100644
index 0000000..fc8398e
--- /dev/null
+++ b/eos/logging/logger.py
@@ -0,0 +1,48 @@
+import logging
+from enum import Enum
+
+from eos.logging.rich_console_handler import RichConsoleHandler
+
+
+class LogLevel(Enum):
+ DEBUG = "DEBUG"
+ INFO = "INFO"
+ WARNING = "WARNING"
+ ERROR = "ERROR"
+
+
+class Logger:
+ """
+ The Logger class is used to log all kinds of messages in EOS. It provides a simple interface
+ for logging messages at different levels.
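+
+ Usage sketch via the module-level `log` instance defined below:
+
+ log.set_level("INFO")
+ log.info("Orchestrator ready.")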
+ """
+
+ def __init__(self):
+ self.logger = logging.getLogger("rich")
+ self.logger.name = "eos"
+ self.logger.setLevel(logging.DEBUG)
+ self.logger.addHandler(RichConsoleHandler())
+
+ def set_level(self, level: LogLevel | str) -> None:
+ if isinstance(level, str):
+ level = LogLevel(level)
+ self.logger.setLevel(level.value)
+
+ def debug(self, message: str, *args, **kwargs) -> None:
+ stacklevel = kwargs.pop("stacklevel", 2)
+ self.logger.debug(message, *args, **kwargs, stacklevel=stacklevel)
+
+ def info(self, message: str, *args, **kwargs) -> None:
+ stacklevel = kwargs.pop("stacklevel", 2)
+ self.logger.info(message, *args, **kwargs, stacklevel=stacklevel)
+
+ def warning(self, message: str, *args, **kwargs) -> None:
+ stacklevel = kwargs.pop("stacklevel", 2)
+ self.logger.warning(message, *args, **kwargs, stacklevel=stacklevel)
+
+ def error(self, message: str, *args, **kwargs) -> None:
+ stacklevel = kwargs.pop("stacklevel", 2)
+ self.logger.error(message, *args, **kwargs, stacklevel=stacklevel)
+
+
+log = Logger()
diff --git a/eos/logging/rich_console_handler.py b/eos/logging/rich_console_handler.py
new file mode 100644
index 0000000..555d737
--- /dev/null
+++ b/eos/logging/rich_console_handler.py
@@ -0,0 +1,32 @@
+from datetime import datetime, timezone
+from logging import Handler, LogRecord
+from pathlib import Path
+from typing import ClassVar
+
+from rich.console import Console
+
+
+class RichConsoleHandler(Handler):
+ """
+ A logging handler that uses the Rich library to print logs to the console.
+ """
+
+ _LOG_COLORS: ClassVar = {
+ "DEBUG": "[cyan]",
+ "INFO": "[green]",
+ "WARNING": "[yellow]",
+ "ERROR": "[bold red]",
+ }
+
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ self.console = Console()
+
+ def emit(self, record: LogRecord) -> None:
+ time = datetime.fromtimestamp(record.created, tz=timezone.utc).strftime("%m/%d/%Y %H:%M:%S")
+ level = record.levelname
+ filename = Path(record.pathname).name
+ line_no = record.lineno
+
+ log_prefix = f"{self._LOG_COLORS.get(level, '[white]')}{level}[/] {time} {filename}:{line_no} -"
+ self.console.print(f"{log_prefix} {record.getMessage()}")
diff --git a/eos/monitoring/__init__.py b/eos/monitoring/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/eos/monitoring/graceful_termination_monitor.py b/eos/monitoring/graceful_termination_monitor.py
new file mode 100644
index 0000000..549ece0
--- /dev/null
+++ b/eos/monitoring/graceful_termination_monitor.py
@@ -0,0 +1,34 @@
+from eos.logging.logger import log
+from eos.persistence.db_manager import DbManager
+from eos.persistence.mongo_repository import MongoRepository
+from eos.utils.singleton import Singleton
+
+
+class GracefulTerminationMonitor(metaclass=Singleton):
+ """
+ The graceful termination monitor is responsible for tracking whether EOS has been terminated gracefully.
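+
+ Usage sketch (assumes a configured DbManager):
+
+ monitor = GracefulTerminationMonitor(db_manager)
+ if not monitor.previously_terminated_gracefully():
+ ... # e.g. force review/re-submission of previously running work
+ monitor.terminated_gracefully() # call on clean shutdown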
+ """
+
+ def __init__(self, db_manager: DbManager):
+ self._globals = MongoRepository("globals", db_manager)
+ self._globals.create_indices([("key", 1)], unique=True)
+
+ graceful_termination = self._globals.get_one(key="graceful_termination")
+ if not graceful_termination:
+ self._globals.create({"key": "graceful_termination", "terminated_gracefully": False})
+ self._terminated_gracefully = False
+ else:
+ self._terminated_gracefully = graceful_termination["terminated_gracefully"]
+ if not self._terminated_gracefully:
+ log.warning("EOS did not terminate gracefully!")
+
+ def previously_terminated_gracefully(self) -> bool:
+ return self._terminated_gracefully
+
+ def terminated_gracefully(self) -> None:
+ self._set_terminated_gracefully(True)
+ log.debug("EOS terminated gracefully.")
+
+ def _set_terminated_gracefully(self, value: bool) -> None:
+ self._terminated_gracefully = value
+ self._globals.update({"terminated_gracefully": value}, key="graceful_termination")
diff --git a/eos/optimization/__init__.py b/eos/optimization/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/eos/optimization/abstract_sequential_optimizer.py b/eos/optimization/abstract_sequential_optimizer.py
new file mode 100644
index 0000000..d85870a
--- /dev/null
+++ b/eos/optimization/abstract_sequential_optimizer.py
@@ -0,0 +1,54 @@
+from abc import ABC, abstractmethod
+
+import pandas as pd
+
+
+class AbstractSequentialOptimizer(ABC):
+ """
+ Abstract interface for a sequential optimizer.
+ At a minimum, the optimizer should give new parameters to clients, receive results from clients, and
+ report the best parameters found so far.
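+
+ Typical ask/tell loop (sketch; `outputs_df` comes from running the experiments):
+
+ params_df = optimizer.sample(num_experiments=2)
+ ... # run the experiments and collect results into outputs_df
+ optimizer.report(params_df, outputs_df)
+ best_df = optimizer.get_optimal_solutions()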
+ """
+
+ @abstractmethod
+ def sample(self, num_experiments: int = 1) -> pd.DataFrame:
+ """
+ Ask the optimizer for new experimental parameters. The experimental parameters are provided as a DataFrame,
+ with one row per experiment and one column per dynamic parameter in flat format (task_name/param_name).
+
+ :param num_experiments: The number of experiments for which to request new parameters.
+ """
+
+ @abstractmethod
+ def report(self, inputs_df: pd.DataFrame, outputs_df: pd.DataFrame) -> None:
+ """
+ Report the results of experiments to the optimizer.
+
+ :param inputs_df: A DataFrame with the input parameters for the experiments.
+ :param outputs_df: A DataFrame with the output parameters for the experiments.
+ """
+
+ @abstractmethod
+ def get_optimal_solutions(self) -> pd.DataFrame:
+ """
+ Get the set of best outputs found so far and the parameters that produced them.
+ This is the Pareto front.
+
+ :return: A dataframe with the best parameters and outputs found so far.
+ """
+
+ @abstractmethod
+ def get_input_names(self) -> list[str]:
+ """
+ Get the names of the input parameters.
+
+ :return: A list of the names of the input parameters.
+ """
+
+ @abstractmethod
+ def get_output_names(self) -> list[str]:
+ """
+ Get the names of the outputs.
+
+ :return: A list of the names of the outputs.
+ """
diff --git a/eos/optimization/exceptions.py b/eos/optimization/exceptions.py
new file mode 100644
index 0000000..0edf105
--- /dev/null
+++ b/eos/optimization/exceptions.py
@@ -0,0 +1,2 @@
+class EosCampaignOptimizerDomainError(Exception):
+ pass
diff --git a/eos/optimization/sequential_bayesian_optimizer.py b/eos/optimization/sequential_bayesian_optimizer.py
new file mode 100644
index 0000000..85afc20
--- /dev/null
+++ b/eos/optimization/sequential_bayesian_optimizer.py
@@ -0,0 +1,164 @@
+import bofire.strategies.api as strategies
+import pandas as pd
+from bofire.data_models.acquisition_functions.acquisition_function import (
+ AcquisitionFunction,
+)
+from bofire.data_models.constraints.constraint import Constraint
+from bofire.data_models.domain.constraints import Constraints
+from bofire.data_models.domain.domain import Domain
+from bofire.data_models.domain.features import Inputs, Outputs
+from bofire.data_models.enum import SamplingMethodEnum
+from bofire.data_models.features.categorical import CategoricalInput, CategoricalOutput
+from bofire.data_models.features.continuous import ContinuousInput, ContinuousOutput
+from bofire.data_models.features.discrete import DiscreteInput
+from bofire.data_models.objectives.identity import MaximizeObjective, MinimizeObjective
+from bofire.data_models.objectives.target import CloseToTargetObjective
+from bofire.data_models.strategies.predictives.mobo import MoboStrategy
+from bofire.data_models.strategies.predictives.sobo import SoboStrategy
+from pandas import Series
+
+from eos.optimization.exceptions import EosCampaignOptimizerDomainError
+from eos.optimization.abstract_sequential_optimizer import AbstractSequentialOptimizer
+
+
+class BayesianSequentialOptimizer(AbstractSequentialOptimizer):
+ """
+ Uses BoFire's Bayesian optimization to optimize the parameters of a series of experiments.
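+
+ Example construction (sketch; the feature keys are illustrative and the exact
+ BoFire feature/acquisition constructors depend on the installed BoFire version):
+
+ optimizer = BayesianSequentialOptimizer(
+ inputs=[ContinuousInput(key="mixing.time", bounds=(0, 360))],
+ outputs=[ContinuousOutput(key="score", objective=MaximizeObjective())],
+ constraints=[],
+ acquisition_function=qNEI(),
+ num_initial_samples=5,
+ )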
+ """
+
+ InputType = ContinuousInput | DiscreteInput | CategoricalInput
+ OutputType = ContinuousOutput | CategoricalOutput
+
+ def __init__(
+ self,
+ inputs: list[InputType],
+ outputs: list[OutputType],
+ constraints: list[Constraint],
+ acquisition_function: AcquisitionFunction,
+ num_initial_samples: int,
+ initial_sampling_method: SamplingMethodEnum = SamplingMethodEnum.SOBOL,
+ ):
+ self._acquisition_function: AcquisitionFunction = acquisition_function
+ self._num_initial_samples: int = num_initial_samples
+ self._initial_sampling_method: SamplingMethodEnum = initial_sampling_method
+ self._domain: Domain = Domain(
+ inputs=Inputs(features=inputs),
+ outputs=Outputs(features=outputs),
+ constraints=Constraints(constraints=constraints),
+ )
+ self._input_names = [input_feature.key for input_feature in self._domain.inputs.features]
+ self._output_names = [output_feature.key for output_feature in self._domain.outputs.features]
+
+ self._generate_initial_samples: bool = self._num_initial_samples > 0
+ self._initial_samples_df: pd.DataFrame | None = None
+ self._results_reported: int = 0
+
+ self._optimizer_data_model = (
+ SoboStrategy(domain=self._domain, acquisition_function=acquisition_function)
+ if len(outputs) == 1
+ else MoboStrategy(domain=self._domain, acquisition_function=acquisition_function)
+ )
+ self._optimizer = strategies.map(data_model=self._optimizer_data_model)
+
+ def sample(self, num_experiments: int = 1) -> pd.DataFrame:
+ if self._generate_initial_samples and self._results_reported < self._num_initial_samples:
+ if self._initial_samples_df is None:
+ self._generate_initial_samples_df()
+
+ if self._initial_samples_df is not None and not self._initial_samples_df.empty:
+ return self._fetch_and_remove_initial_samples(num_experiments)
+
+ self._initial_samples_df = None
+
+ new_parameters_df = self._optimizer.ask(candidate_count=num_experiments)
+
+ return new_parameters_df[self._input_names]
+
+ def _generate_initial_samples_df(self) -> None:
+ self._initial_samples_df = self._domain.inputs.sample(
+ n=self._num_initial_samples, method=self._initial_sampling_method
+ )
+
+ def _fetch_and_remove_initial_samples(self, num_experiments: int) -> pd.DataFrame:
+ num_experiments = min(num_experiments, len(self._initial_samples_df))
+ new_parameters_df = self._initial_samples_df.iloc[:num_experiments]
+ self._initial_samples_df = self._initial_samples_df.iloc[num_experiments:]
+ return new_parameters_df
+
+ def report(self, inputs_df: pd.DataFrame, outputs_df: pd.DataFrame) -> None:
+ self._validate_sample(inputs_df, outputs_df)
+ results_df = pd.concat([inputs_df, outputs_df], axis=1)
+ self._optimizer.tell(results_df)
+ self._results_reported += len(results_df)
+
+ def get_optimal_solutions(self) -> pd.DataFrame:
+ experiments = self._optimizer.experiments
+ outputs = self._domain.outputs.get_by_objective(
+ includes=[MaximizeObjective, MinimizeObjective, CloseToTargetObjective]
+ ).features
+
+ def is_dominated(exp: Series, other_exp: Series) -> bool:
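+ # exp is dominated by other_exp if other_exp is at least as good on every
+ # objective and strictly better on at least one.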
+ at_least_one_worse = False
+ for output in outputs:
+ if isinstance(output.objective, MaximizeObjective):
+ if exp[output.key] > other_exp[output.key]:
+ return False
+ if exp[output.key] < other_exp[output.key]:
+ at_least_one_worse = True
+ elif isinstance(output.objective, MinimizeObjective):
+ if exp[output.key] < other_exp[output.key]:
+ return False
+ if exp[output.key] > other_exp[output.key]:
+ at_least_one_worse = True
+ elif isinstance(output.objective, CloseToTargetObjective):
+ target = output.objective.target
+ if abs(exp[output.key] - target) < abs(other_exp[output.key] - target):
+ return False
+ if abs(exp[output.key] - target) > abs(other_exp[output.key] - target):
+ at_least_one_worse = True
+ return at_least_one_worse
+
+ pareto_solutions = [
+ exp
+ for i, exp in experiments.iterrows()
+ if not any(is_dominated(exp, other_exp) for j, other_exp in experiments.iterrows() if i != j)
+ ]
+
+ result_df = pd.DataFrame(pareto_solutions)
+
+ # 'valid_' columns are generated by BoFire
+ filtered_columns = [col for col in result_df.columns if not col.startswith("valid_")]
+
+ return result_df[filtered_columns]
+
+ def get_input_names(self) -> list[str]:
+ return self._input_names
+
+ def get_output_names(self) -> list[str]:
+ return self._output_names
+
+ def _get_output(self, output_name: str) -> OutputType:
+ for output in self._domain.outputs.features:
+ if output.key == output_name:
+ return output
+
+ raise EosCampaignOptimizerDomainError(f"Output {output_name} not found in the optimization domain.")
+
+ def _validate_sample(self, inputs_df: pd.DataFrame, outputs_df: pd.DataFrame) -> None:
+ """
+ Validate that all expected input and output columns are present in their respective DataFrames.
+
+ :param inputs_df: DataFrame with input parameters for the experiments.
+ :param outputs_df: DataFrame with output parameters for the experiments.
+ :raises EosCampaignOptimizerDomainError: If any expected input or output columns are missing.
+ """
+ missing_inputs = set(self._input_names) - set(inputs_df.columns)
+ missing_outputs = set(self._output_names) - set(outputs_df.columns)
+
+ if missing_inputs or missing_outputs:
+ error_message = []
+ if missing_inputs:
+ error_message.append(f"Missing input columns: {', '.join(missing_inputs)}")
+ if missing_outputs:
+ error_message.append(f"Missing output columns: {', '.join(missing_outputs)}")
+ raise EosCampaignOptimizerDomainError(". ".join(error_message))
diff --git a/eos/optimization/sequential_optimizer_actor.py b/eos/optimization/sequential_optimizer_actor.py
new file mode 100644
index 0000000..f97f964
--- /dev/null
+++ b/eos/optimization/sequential_optimizer_actor.py
@@ -0,0 +1,27 @@
+from typing import Any
+
+import pandas as pd
+import ray
+
+from eos.optimization.abstract_sequential_optimizer import AbstractSequentialOptimizer
+
+
+@ray.remote
+class SequentialOptimizerActor(AbstractSequentialOptimizer):
+ def __init__(self, constructor_args: dict[str, Any], optimizer_type: type[AbstractSequentialOptimizer]):
+ self.optimizer = optimizer_type(**constructor_args)
+
+ def sample(self, num_experiments: int = 1) -> pd.DataFrame:
+ return self.optimizer.sample(num_experiments)
+
+ def report(self, input_df: pd.DataFrame, output_df: pd.DataFrame) -> None:
+ self.optimizer.report(input_df, output_df)
+
+ def get_optimal_solutions(self) -> pd.DataFrame:
+ return self.optimizer.get_optimal_solutions()
+
+ def get_input_names(self) -> list[str]:
+ return self.optimizer.get_input_names()
+
+ def get_output_names(self) -> list[str]:
+ return self.optimizer.get_output_names()
diff --git a/eos/orchestration/__init__.py b/eos/orchestration/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/eos/orchestration/exceptions.py b/eos/orchestration/exceptions.py
new file mode 100644
index 0000000..7841fe4
--- /dev/null
+++ b/eos/orchestration/exceptions.py
@@ -0,0 +1,18 @@
+class EosExperimentTypeInUseError(Exception):
+ pass
+
+
+class EosFailedExperimentRecoveryError(Exception):
+ pass
+
+
+class EosFailedCampaignRecoveryError(Exception):
+ pass
+
+
+class EosExperimentDoesNotExistError(Exception):
+ pass
+
+
+class EosError(Exception):
+ pass
diff --git a/eos/orchestration/orchestrator.py b/eos/orchestration/orchestrator.py
new file mode 100644
index 0000000..91969fc
--- /dev/null
+++ b/eos/orchestration/orchestrator.py
@@ -0,0 +1,721 @@
+import asyncio
+import atexit
+import traceback
+from asyncio import Lock as AsyncLock
+from collections.abc import AsyncIterable
+from typing import Any, TYPE_CHECKING
+
+import ray
+
+from eos.campaigns.campaign_executor_factory import CampaignExecutorFactory
+from eos.campaigns.campaign_manager import CampaignManager
+from eos.campaigns.campaign_optimizer_manager import CampaignOptimizerManager
+from eos.campaigns.entities.campaign import CampaignStatus, CampaignExecutionParameters, Campaign
+from eos.campaigns.exceptions import EosCampaignExecutionError
+from eos.configuration.configuration_manager import ConfigurationManager
+from eos.configuration.entities.lab import LabDeviceConfig
+from eos.configuration.entities.task import TaskConfig
+from eos.configuration.entities.task_specification import TaskSpecification
+from eos.configuration.exceptions import EosConfigurationError
+from eos.configuration.validation import validation_utils
+from eos.containers.container_manager import ContainerManager
+from eos.devices.device_manager import DeviceManager
+from eos.experiments.entities.experiment import ExperimentStatus, Experiment, ExperimentExecutionParameters
+from eos.experiments.exceptions import EosExperimentExecutionError
+from eos.experiments.experiment_executor_factory import ExperimentExecutorFactory
+from eos.experiments.experiment_manager import ExperimentManager
+from eos.logging.logger import log
+from eos.monitoring.graceful_termination_monitor import GracefulTerminationMonitor
+from eos.orchestration.exceptions import (
+ EosExperimentTypeInUseError,
+ EosExperimentDoesNotExistError,
+ EosError,
+)
+from eos.persistence.db_manager import DbManager
+from eos.persistence.file_db_manager import FileDbManager
+from eos.persistence.service_credentials import ServiceCredentials
+from eos.resource_allocation.resource_allocation_manager import (
+ ResourceAllocationManager,
+)
+from eos.scheduling.basic_scheduler import BasicScheduler
+from eos.tasks.entities.task import Task, TaskStatus
+from eos.tasks.on_demand_task_executor import OnDemandTaskExecutor
+from eos.tasks.task_executor import TaskExecutor
+from eos.tasks.task_manager import TaskManager
+from eos.utils.singleton import Singleton
+
+if TYPE_CHECKING:
+ from eos.campaigns.campaign_executor import CampaignExecutor
+ from eos.experiments.experiment_executor import ExperimentExecutor
+
+
+class Orchestrator(metaclass=Singleton):
+ """
+ The top-level orchestrator that initializes and manages all EOS components.
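+
+ Lifecycle sketch (illustrative; the user directory, credentials, and lab/experiment
+ types come from the deployment configuration):
+
+ orchestrator = Orchestrator(user_dir, db_credentials, file_db_credentials)
+ orchestrator.load_labs({"small_lab"})
+ orchestrator.load_experiments({"water_purification"})
+ ... # submit experiments/campaigns
+ orchestrator.terminate()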
+ """
+
+ def __init__(
+ self,
+ user_dir: str,
+ db_credentials: ServiceCredentials,
+ file_db_credentials: ServiceCredentials,
+ ):
+ self._user_dir = user_dir
+ self._db_credentials = db_credentials
+ self._file_db_credentials = file_db_credentials
+ self._initialized = False
+
+ self.initialize()
+ atexit.register(self.terminate)
+
+ def initialize(self) -> None:
+ """
+ Prepare the orchestrator. This is required before any other operations can be performed.
+ """
+ if self._initialized:
+ return
+
+ log.info("Initializing EOS...")
+ log.info("Initializing Ray cluster...")
+ ray.init(namespace="eos", resources={"eos-core": 1000})
+ log.info("Ray initialized.")
+
+ # Configuration ###########################################
+ self._configuration_manager = ConfigurationManager(self._user_dir)
+
+ # Persistence #############################################
+ self._db_manager = DbManager(self._db_credentials)
+ self._file_db_manager = FileDbManager(self._file_db_credentials)
+
+ # Monitoring ##############################################
+ self._graceful_termination_monitor = GracefulTerminationMonitor(self._db_manager)
+
+ # State management ########################################
+ self._device_manager = DeviceManager(self._configuration_manager, self._db_manager)
+ self._container_manager = ContainerManager(self._configuration_manager, self._db_manager)
+ self._resource_allocation_manager = ResourceAllocationManager(self._configuration_manager, self._db_manager)
+ self._task_manager = TaskManager(self._configuration_manager, self._db_manager, self._file_db_manager)
+ self._experiment_manager = ExperimentManager(self._configuration_manager, self._db_manager)
+ self._campaign_manager = CampaignManager(self._configuration_manager, self._db_manager)
+ self._campaign_optimizer_manager = CampaignOptimizerManager(self._db_manager)
+
+ # Execution ###############################################
+ self._task_executor = TaskExecutor(
+ self._task_manager,
+ self._device_manager,
+ self._container_manager,
+ self._resource_allocation_manager,
+ self._configuration_manager,
+ )
+ self._on_demand_task_executor = OnDemandTaskExecutor(
+ self._task_executor, self._task_manager, self._container_manager
+ )
+ self._scheduler = BasicScheduler(
+ self._configuration_manager,
+ self._experiment_manager,
+ self._task_manager,
+ self._device_manager,
+ self._resource_allocation_manager,
+ )
+ self._experiment_executor_factory = ExperimentExecutorFactory(
+ self._configuration_manager,
+ self._experiment_manager,
+ self._task_manager,
+ self._container_manager,
+ self._task_executor,
+ self._scheduler,
+ )
+ self._campaign_executor_factory = CampaignExecutorFactory(
+ self._configuration_manager,
+ self._campaign_manager,
+ self._campaign_optimizer_manager,
+ self._task_manager,
+ self._experiment_executor_factory,
+ )
+
+ self._campaign_submission_lock = AsyncLock()
+ self._submitted_campaigns: dict[str, CampaignExecutor] = {}
+ self._experiment_submission_lock = AsyncLock()
+ self._submitted_experiments: dict[str, ExperimentExecutor] = {}
+
+ self._campaign_cancellation_queue = asyncio.Queue(maxsize=100)
+ self._experiment_cancellation_queue = asyncio.Queue(maxsize=100)
+
+ self._loading_lock = AsyncLock()
+
+ self._fail_all_running_work()
+
+ self._initialized = True
+
+ def _fail_all_running_work(self) -> None:
+ """
+ When the orchestrator starts, fail all running tasks, experiments, and campaigns.
+ This is for safety, as if the orchestrator was terminated while there was running work then the state of the
+ system may be unknown. We want to force manual review of the state of the system and explicitly require
+ re-submission of any work that was running.
+ """
+ running_tasks = self._task_manager.get_tasks(status=TaskStatus.RUNNING.value)
+ for task in running_tasks:
+ self._task_manager.fail_task(task.experiment_id, task.id)
+ log.warning(f"EXP '{task.experiment_id}' - Failed task '{task.id}'.")
+
+ running_experiments = self._experiment_manager.get_experiments(status=ExperimentStatus.RUNNING.value)
+ for experiment in running_experiments:
+ self._experiment_manager.fail_experiment(experiment.id)
+
+ running_campaigns = self._campaign_manager.get_campaigns(status=CampaignStatus.RUNNING.value)
+ for campaign in running_campaigns:
+ self._campaign_manager.fail_campaign(campaign.id)
+
+ if running_tasks:
+ log.warning("All running tasks have been marked as failed. Please review the state of the system.")
+
+ if running_experiments:
+ log.warning(
+ "All running experiments have been marked as failed. Please review the state of the system and "
+ "re-submit with resume=True."
+ )
+
+ if running_campaigns:
+ log.warning(
+ "All running campaigns have been marked as failed. Please review the state of the system and re-submit "
+ "with resume=True."
+ )
+
+ def terminate(self) -> None:
+ """
+ Terminate the orchestrator. After this, no other operations can be performed.
+ This should be called before the program exits.
+ """
+ if not self._initialized:
+ return
+ log.info("Cleaning up device actors...")
+ self._device_manager.cleanup_device_actors()
+ log.info("Shutting down Ray cluster...")
+ ray.shutdown()
+ self._graceful_termination_monitor.terminated_gracefully()
+ self._initialized = False
+
+ def load_labs(self, labs: set[str]) -> None:
+ """
+ Load one or more labs into the orchestrator.
+ """
+ self._configuration_manager.load_labs(labs)
+ self._device_manager.update_devices(loaded_labs=labs)
+ self._container_manager.update_containers(loaded_labs=labs)
+
+ def unload_labs(self, labs: set[str]) -> None:
+ """
+ Unload one or more labs from the orchestrator.
+ """
+ self._configuration_manager.unload_labs(labs)
+ self._device_manager.update_devices(unloaded_labs=labs)
+ self._container_manager.update_containers(unloaded_labs=labs)
+
+ async def reload_labs(self, lab_types: set[str]) -> None:
+ """
+ Reload one or more labs in the orchestrator.
+ """
+ async with self._loading_lock:
+ experiments_to_reload = set()
+ running_experiments = self._experiment_manager.get_experiments(status=ExperimentStatus.RUNNING.value)
+ for lab_type in lab_types:
+ for experiment in running_experiments:
+ experiment_config = self._configuration_manager.experiments[experiment.type]
+ if lab_type in experiment_config.labs:
+ raise EosExperimentTypeInUseError(
+ f"Cannot reload lab type '{lab_type}' as there are running experiments that use it."
+ )
+
+ # Determine experiments to reload for this lab type
+ for experiment_type, experiment_config in self._configuration_manager.experiments.items():
+ if lab_type in experiment_config.labs:
+ experiments_to_reload.add(experiment_type)
+ try:
+ self.unload_labs(lab_types)
+ self.load_labs(lab_types)
+ self.load_experiments(experiments_to_reload)
+ except EosConfigurationError:
+ log.error(f"Error reloading labs: {traceback.format_exc()}")
+ raise
+
+ async def update_loaded_labs(self, lab_types: set[str]) -> None:
+ """
+ Update the loaded labs with new configurations.
+ """
+ async with self._loading_lock:
+ currently_loaded = set(self._configuration_manager.labs.keys())
+
+ if currently_loaded == lab_types:
+ return
+
+ to_unload = currently_loaded - lab_types
+ to_load = lab_types - currently_loaded
+
+ running_experiments = self._experiment_manager.get_experiments(status=ExperimentStatus.RUNNING.value)
+ for lab_type in to_unload:
+ for experiment in running_experiments:
+ experiment_config = self._configuration_manager.experiments[experiment.type]
+ if lab_type in experiment_config.labs:
+ raise EosExperimentTypeInUseError(
+ f"Cannot unload lab type '{lab_type}' as there are running experiments that use it."
+ )
+
+ try:
+ self.unload_labs(to_unload)
+ self.load_labs(to_load)
+ except EosConfigurationError:
+ log.error(f"Error updating loaded labs: {traceback.format_exc()}")
+ raise
+
+ async def get_lab_loaded_statuses(self) -> dict[str, bool]:
+ """
+ Return a dictionary mapping each lab type to a boolean indicating whether it is loaded.
+ """
+ return self._configuration_manager.get_lab_loaded_statuses()
+
+ def load_experiments(self, experiment_types: set[str]) -> None:
+ """
+ Load one or more experiments into the orchestrator.
+ """
+ self._configuration_manager.load_experiments(experiment_types)
+
+ def unload_experiments(self, experiment_types: set[str]) -> None:
+ """
+ Unload one or more experiments from the orchestrator.
+ """
+ self._configuration_manager.unload_experiments(experiment_types)
+
+ async def reload_experiments(self, experiment_types: set[str]) -> None:
+ """
+ Reload one or more experiments in the orchestrator.
+ """
+ async with self._loading_lock:
+ for experiment_type in experiment_types:
+ existing_experiments = self._experiment_manager.get_experiments(
+ status=ExperimentStatus.RUNNING.value, type=experiment_type
+ )
+ if existing_experiments:
+ raise EosExperimentTypeInUseError(
+ f"Cannot reload experiment type '{experiment_type}' as there are running experiments of this "
+ f"type."
+ )
+ try:
+ self.unload_experiments(experiment_types)
+ self.load_experiments(experiment_types)
+ except EosConfigurationError:
+ log.error(f"Error reloading experiments: {traceback.format_exc()}")
+ raise
+
+ async def update_loaded_experiments(self, experiment_types: set[str]) -> None:
+ """
+ Update the loaded experiments with new configurations.
+ """
+ async with self._loading_lock:
+ currently_loaded = set(self._configuration_manager.experiments.keys())
+
+ if currently_loaded == experiment_types:
+ return
+
+ to_unload = currently_loaded - experiment_types
+ to_load = experiment_types - currently_loaded
+
+ for experiment_type in to_unload:
+ existing_experiments = self._experiment_manager.get_experiments(
+ status=ExperimentStatus.RUNNING.value, type=experiment_type
+ )
+ if existing_experiments:
+ raise EosExperimentTypeInUseError(
+ f"Cannot unload experiment type '{experiment_type}' as there are running experiments of this "
+ f"type."
+ )
+
+ try:
+ self.unload_experiments(to_unload)
+ self.load_experiments(to_load)
+ except EosConfigurationError:
+ log.error(f"Error updating loaded experiments: {traceback.format_exc()}")
+ raise
+
+ async def get_experiment_loaded_statuses(self) -> dict[str, bool]:
+ """
+ Return a dictionary mapping each experiment type to a boolean indicating whether it is loaded.
+ """
+ return self._configuration_manager.get_experiment_loaded_statuses()
+
+ async def get_lab_devices(
+ self, lab_types: set[str] | None = None, task_type: str | None = None
+ ) -> dict[str, dict[str, LabDeviceConfig]]:
+ """
+ Get the devices that are available in the given labs or for a specific task type.
+
+ :param lab_types: The lab types. If None, all labs will be considered.
+ :param task_type: The task type. If provided, only devices supporting this task type will be returned.
+ :return: A dictionary of lab types and the devices available in each lab.
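+
+ Example output (illustrative; lab and device names are hypothetical):
+ {"small_lab": {"magnetic_mixer": LabDeviceConfig(...)}}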
+ """
+ lab_devices = {}
+
+ if not lab_types or not any(lab_type.strip() for lab_type in lab_types):
+ lab_types = set(self._configuration_manager.labs.keys())
+
+ task_device_types = set()
+ if task_type:
+ task_spec = self._configuration_manager.task_specs.get_spec_by_type(task_type)
+ task_device_types = set(task_spec.device_types) if task_spec.device_types else set()
+
+ for lab_type in lab_types:
+ lab = self._configuration_manager.labs.get(lab_type)
+ if not lab:
+ continue
+
+ if task_device_types:
+ devices = {name: device for name, device in lab.devices.items() if device.type in task_device_types}
+ else:
+ devices = lab.devices
+
+ if devices:
+ lab_devices[lab_type] = devices
+
+ return lab_devices
+
+ async def get_task(self, experiment_id: str, task_id: str) -> Task:
+ """
+ Get a task by its unique identifier.
+
+ :param experiment_id: The unique identifier of the experiment.
+ :param task_id: The unique identifier of the task.
+ :return: The task entity.
+ """
+ return self._task_manager.get_task(experiment_id, task_id)
+
+ async def submit_task(
+ self,
+ task_config: TaskConfig,
+ resource_allocation_priority: int = 1,
+ resource_allocation_timeout: int = 180,
+ ) -> None:
+ """
+ Submit a new task for execution. By default, tasks submitted in this way have the highest resource allocation
+ priority (1; lower values are allocated first) and an allocation timeout of 180 seconds.
+
+ :param task_config: The task configuration. This is the same data as defined in an experiment configuration.
+ :param resource_allocation_priority: The priority of the task in acquiring resources.
+ :param resource_allocation_timeout: The maximum seconds to wait for resources to be allocated before raising an
+ error.
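+
+ Example (illustrative sketch; assumes a valid TaskConfig for a loaded task type):
+ >>> config = TaskConfig(...) # a task configuration, as would be defined in an experiment
+ >>> await orchestrator.submit_task(config, resource_allocation_priority=1)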
+ """
+ await self._on_demand_task_executor.submit_task(
+ task_config, resource_allocation_priority, resource_allocation_timeout
+ )
+
+ async def cancel_task(self, task_id: str, experiment_id: str = "on_demand") -> None:
+ """
+ Cancel a task that is currently being executed.
+
+ :param task_id: The unique identifier of the task.
+ :param experiment_id: The unique identifier of the experiment. Defaults to "on_demand" for tasks submitted
+ outside an experiment.
+ """
+ if experiment_id == "on_demand":
+ await self._on_demand_task_executor.cancel_task(task_id)
+ else:
+ await self._task_executor.request_task_cancellation(experiment_id, task_id)
+
+ async def get_task_types(self) -> list[str]:
+ """
+ Get a list of all task types that are defined in the configuration.
+ """
+ return [task.type for task in self._configuration_manager.task_specs.get_all_specs().values()]
+
+ async def get_task_spec(self, task_type: str) -> TaskSpecification:
+ """
+ Get the task specification for a given task type.
+ """
+ task_spec = self._configuration_manager.task_specs.get_spec_by_type(task_type)
+ if not task_spec:
+ raise EosError(f"Task type '{task_type}' does not exist.")
+
+ return task_spec
+
+ def stream_task_output_file(
+ self, experiment_id: str, task_id: str, file_name: str, chunk_size: int = 3 * 1024 * 1024
+ ) -> AsyncIterable[bytes]:
+ """
+ Stream the contents of a task output file in chunks.
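+
+ Example (illustrative; identifiers and file name are hypothetical):
+ >>> async for chunk in orchestrator.stream_task_output_file("exp_1", "task_1", "results.csv"):
+ ... await process(chunk) # hypothetical consumer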
+ """
+ return self._task_manager.stream_task_output_file(experiment_id, task_id, file_name, chunk_size)
+
+ async def list_task_output_files(self, experiment_id: str, task_id: str) -> list[str]:
+ """
+ Get a list of all output files for a given task.
+ """
+ return self._task_manager.list_task_output_files(experiment_id, task_id)
+
+ async def get_experiment(self, experiment_id: str) -> Experiment | None:
+ """
+ Get an experiment by its unique identifier.
+
+ :param experiment_id: The unique identifier of the experiment.
+ :return: The experiment entity.
+ """
+ return self._experiment_manager.get_experiment(experiment_id)
+
+ async def submit_experiment(
+ self,
+ experiment_id: str,
+ experiment_type: str,
+ execution_parameters: ExperimentExecutionParameters,
+ dynamic_parameters: dict[str, dict[str, Any]],
+ metadata: dict[str, Any] | None = None,
+ ) -> None:
+ """
+ Submit a new experiment for execution. The experiment will be executed asynchronously.
+
+ :param experiment_id: The unique identifier of the experiment.
+ :param experiment_type: The type of the experiment. Must have a configuration defined in the
+ configuration manager.
+ :param execution_parameters: The execution parameters for the experiment.
+ :param dynamic_parameters: The dynamic parameters for the experiment.
+ :param metadata: Any additional metadata.
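+
+ Example (illustrative sketch; identifiers and parameter names are hypothetical, and default
+ execution parameters are assumed to be valid):
+ >>> await orchestrator.submit_experiment(
+ ... experiment_id="wp_1",
+ ... experiment_type="water_purification",
+ ... execution_parameters=ExperimentExecutionParameters(),
+ ... dynamic_parameters={"mixing": {"time": 120}},
+ ... )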
+ """
+ self._validate_experiment_type_exists(experiment_type)
+
+ async with self._experiment_submission_lock:
+ if experiment_id in self._submitted_experiments:
+ log.warning(f"Experiment '{experiment_id}' is already submitted. Ignoring new submission.")
+ return
+
+ experiment_executor = self._experiment_executor_factory.create(
+ experiment_id, experiment_type, execution_parameters
+ )
+
+ try:
+ experiment_executor.start_experiment(dynamic_parameters, metadata)
+ self._submitted_experiments[experiment_id] = experiment_executor
+ except EosExperimentExecutionError:
+ log.error(f"Failed to submit experiment '{experiment_id}': {traceback.format_exc()}")
+ self._submitted_experiments.pop(experiment_id, None)
+ return
+
+ log.info(f"Submitted experiment '{experiment_id}'.")
+
+ async def cancel_experiment(self, experiment_id: str) -> None:
+ """
+ Cancel an experiment that is currently being executed.
+
+ :param experiment_id: The unique identifier of the experiment.
+ """
+ if experiment_id in self._submitted_experiments:
+ await self._experiment_cancellation_queue.put(experiment_id)
+
+ async def get_experiment_types(self) -> list[str]:
+ """
+ Get a list of all experiment types that are defined in the configuration.
+ """
+ return list(self._configuration_manager.experiments.keys())
+
+ async def get_experiment_dynamic_params_template(self, experiment_type: str) -> dict[str, Any]:
+ """
+ Get the dynamic parameters template for a given experiment type.
+
+ :param experiment_type: The type of the experiment.
+ :return: The dynamic parameter template.
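+
+ Example output (illustrative; task and parameter names are hypothetical):
+ {"mixing": {"time": "PLACEHOLDER"}, "evaporation": {"temperature": "PLACEHOLDER"}}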
+ """
+ experiment_config = self._configuration_manager.experiments[experiment_type]
+ dynamic_parameters = {}
+
+ for task in experiment_config.tasks:
+ task_dynamic_parameters = {}
+ for parameter_name, parameter_value in task.parameters.items():
+ if validation_utils.is_dynamic_parameter(parameter_value):
+ task_dynamic_parameters[parameter_name] = "PLACEHOLDER"
+ if task_dynamic_parameters:
+ dynamic_parameters[task.id] = task_dynamic_parameters
+
+ return dynamic_parameters
+
+ async def get_campaign(self, campaign_id: str) -> Campaign | None:
+ """
+ Get a campaign by its unique identifier.
+
+ :param campaign_id: The unique identifier of the campaign.
+ :return: The campaign entity.
+ """
+ return self._campaign_manager.get_campaign(campaign_id)
+
+ async def submit_campaign(
+ self,
+ campaign_id: str,
+ experiment_type: str,
+ campaign_execution_parameters: CampaignExecutionParameters,
+ ) -> None:
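+ """
+ Submit a new campaign for execution. The campaign will be executed asynchronously.
+
+ :param campaign_id: The unique identifier of the campaign.
+ :param experiment_type: The type of the experiment the campaign will run.
+ :param campaign_execution_parameters: The execution parameters for the campaign.
+ """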
+ self._validate_experiment_type_exists(experiment_type)
+
+ async with self._campaign_submission_lock:
+ if campaign_id in self._submitted_campaigns:
+ log.warning(f"Campaign '{campaign_id}' is already submitted. Ignoring new submission.")
+ return
+
+ campaign_executor = self._campaign_executor_factory.create(
+ campaign_id, experiment_type, campaign_execution_parameters
+ )
+
+ try:
+ await campaign_executor.start_campaign()
+ self._submitted_campaigns[campaign_id] = campaign_executor
+ except EosCampaignExecutionError:
+ log.error(f"Failed to submit campaign '{campaign_id}': {traceback.format_exc()}")
+ self._submitted_campaigns.pop(campaign_id, None)
+ return
+
+ log.info(f"Submitted campaign '{campaign_id}'.")
+
+ async def cancel_campaign(self, campaign_id: str) -> None:
+ """
+ Cancel a campaign that is currently being executed.
+
+ :param campaign_id: The unique identifier of the campaign.
+ """
+ if campaign_id in self._submitted_campaigns:
+ await self._campaign_cancellation_queue.put(campaign_id)
+
+ async def spin(self, rate_hz: int = 10) -> None:
+ """
+ Spin the orchestrator at a given rate in Hz.
+
+ :param rate_hz: The processing rate in Hz. This is the rate at which the orchestrator will check for progress in
+ submitted experiments and campaigns.
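+
+ Example (illustrative; runs until the process is stopped):
+ >>> asyncio.run(orchestrator.spin(rate_hz=10))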
+ """
+ while True:
+ await self._process_experiment_and_campaign_cancellations()
+
+ await asyncio.gather(
+ self._process_on_demand_tasks(),
+ self._process_experiments(),
+ self._process_campaigns(),
+ )
+ self._resource_allocation_manager.process_active_requests()
+
+ await asyncio.sleep(1 / rate_hz)
+
+ async def _process_experiment_and_campaign_cancellations(self) -> None:
+ while not self._experiment_cancellation_queue.empty():
+ experiment_id = await self._experiment_cancellation_queue.get()
+
+ # The executor may have already completed or failed since cancellation was requested
+ experiment_executor = self._submitted_experiments.get(experiment_id)
+ if not experiment_executor:
+ continue
+
+ log.warning(f"Attempting to cancel experiment '{experiment_id}'.")
+ try:
+ await experiment_executor.cancel_experiment()
+ finally:
+ del self._submitted_experiments[experiment_id]
+ log.warning(f"Cancelled experiment '{experiment_id}'.")
+
+ while not self._campaign_cancellation_queue.empty():
+ campaign_id = await self._campaign_cancellation_queue.get()
+
+ # The executor may have already completed or failed since cancellation was requested
+ campaign_executor = self._submitted_campaigns.get(campaign_id)
+ if not campaign_executor:
+ continue
+
+ log.warning(f"Attempting to cancel campaign '{campaign_id}'.")
+ try:
+ await campaign_executor.cancel_campaign()
+ finally:
+ campaign_executor.cleanup()
+ del self._submitted_campaigns[campaign_id]
+ log.warning(f"Cancelled campaign '{campaign_id}'.")
+
+ async def _process_experiments(self) -> None:
+ to_remove_completed = []
+ to_remove_failed = []
+
+ for experiment_id, experiment_executor in self._submitted_experiments.items():
+ try:
+ completed = await experiment_executor.progress_experiment()
+
+ if completed:
+ to_remove_completed.append(experiment_id)
+ except EosExperimentExecutionError:
+ log.error(f"Error in experiment '{experiment_id}': {traceback.format_exc()}")
+ to_remove_failed.append(experiment_id)
+
+ for experiment_id in to_remove_completed:
+ log.info(f"Completed experiment '{experiment_id}'.")
+ del self._submitted_experiments[experiment_id]
+
+ for experiment_id in to_remove_failed:
+ log.error(f"Failed experiment '{experiment_id}'.")
+ del self._submitted_experiments[experiment_id]
+
+ async def _process_campaigns(self) -> None:
+ async def process_single_campaign(campaign_id: str, campaign_executor) -> tuple[str, bool, bool]:
+ try:
+ completed = await campaign_executor.progress_campaign()
+ return campaign_id, completed, False
+ except EosCampaignExecutionError:
+ log.error(f"Error in campaign '{campaign_id}': {traceback.format_exc()}")
+ return campaign_id, False, True
+
+ results = await asyncio.gather(
+ *(process_single_campaign(cid, executor) for cid, executor in self._submitted_campaigns.items()),
+ )
+
+ to_remove_completed: list[str] = []
+ to_remove_failed: list[str] = []
+
+ for campaign_id, completed, failed in results:
+ if completed:
+ to_remove_completed.append(campaign_id)
+ elif failed:
+ to_remove_failed.append(campaign_id)
+
+ for campaign_id in to_remove_completed:
+ log.info(f"Completed campaign '{campaign_id}'.")
+ self._submitted_campaigns[campaign_id].cleanup()
+ del self._submitted_campaigns[campaign_id]
+
+ for campaign_id in to_remove_failed:
+ log.error(f"Failed campaign '{campaign_id}'.")
+ self._submitted_campaigns[campaign_id].cleanup()
+ del self._submitted_campaigns[campaign_id]
+
+ async def _process_on_demand_tasks(self) -> None:
+ await self._on_demand_task_executor.process_tasks()
+
+ def _validate_experiment_type_exists(self, experiment_type: str) -> None:
+ if experiment_type not in self._configuration_manager.experiments:
+ raise EosExperimentDoesNotExistError(
+ f"Cannot submit experiment of type '{experiment_type}' as it does not exist."
+ )
+
+ @property
+ def configuration_manager(self) -> ConfigurationManager:
+ return self._configuration_manager
+
+ @property
+ def db_manager(self) -> DbManager:
+ return self._db_manager
+
+ @property
+ def device_manager(self) -> DeviceManager:
+ return self._device_manager
+
+ @property
+ def container_manager(self) -> ContainerManager:
+ return self._container_manager
+
+ @property
+ def resource_allocation_manager(self) -> ResourceAllocationManager:
+ return self._resource_allocation_manager
+
+ @property
+ def task_manager(self) -> TaskManager:
+ return self._task_manager
+
+ @property
+ def experiment_manager(self) -> ExperimentManager:
+ return self._experiment_manager
+
+ @property
+ def campaign_manager(self) -> CampaignManager:
+ return self._campaign_manager
+
+ @property
+ def task_executor(self) -> TaskExecutor:
+ return self._task_executor
diff --git a/eos/persistence/__init__.py b/eos/persistence/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/eos/persistence/abstract_repository.py b/eos/persistence/abstract_repository.py
new file mode 100644
index 0000000..c9ffa7f
--- /dev/null
+++ b/eos/persistence/abstract_repository.py
@@ -0,0 +1,35 @@
+from abc import ABC, abstractmethod
+
+
+class AbstractRepository(ABC):
+ """
+ Abstract class for a repository that provides CRUD operations for a collection of entities.
+ """
+
+ @abstractmethod
+ def create(self, entity: dict) -> None:
+ pass
+
+ @abstractmethod
+ def count(self, **query: dict) -> int:
+ pass
+
+ @abstractmethod
+ def exists(self, count: int = 1, **query: dict) -> bool:
+ pass
+
+ @abstractmethod
+ def get_one(self, **query: dict) -> dict:
+ pass
+
+ @abstractmethod
+ def get_all(self, **query: dict) -> list[dict]:
+ pass
+
+ @abstractmethod
+ def update(self, entity: dict, **query: dict) -> None:
+ pass
+
+ @abstractmethod
+ def delete(self, **query: dict) -> None:
+ pass
diff --git a/eos/persistence/db_manager.py b/eos/persistence/db_manager.py
new file mode 100644
index 0000000..f2dbbd4
--- /dev/null
+++ b/eos/persistence/db_manager.py
@@ -0,0 +1,55 @@
+from pymongo import MongoClient
+from pymongo.client_session import ClientSession
+from pymongo.database import Database
+
+from eos.logging.logger import log
+from eos.persistence.service_credentials import ServiceCredentials
+
+
+class DbManager:
+ """
+ Responsible for providing access to a MongoDB database.
+ """
+
+ def __init__(
+ self,
+ db_credentials: ServiceCredentials,
+ db_name: str = "eos",
+ ):
+ self._db_credentials = db_credentials
+
+ self._db_client = MongoClient(
+ host=self._db_credentials.host,
+ port=self._db_credentials.port,
+ username=self._db_credentials.username,
+ password=self._db_credentials.password,
+ serverSelectionTimeoutMS=10000,
+ )
+
+ self._db: Database = self._db_client[db_name]
+
+ log.debug(f"Db manager initialized with database '{db_name}'.")
+
+ def get_db(self) -> Database:
+ """Get the database."""
+ return self._db
+
+ def create_collection_index(self, collection: str, index: list[tuple[str, int]], unique: bool = False) -> None:
+ """
+ Create an index for a collection in the database if it doesn't already exist.
+ :param collection: The collection name.
+ :param index: The index to create. A list of tuples of the field names and index orders.
+ :param unique: Whether the index should be unique.
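+
+ Example (illustrative; collection and field names are hypothetical):
+ create_collection_index("tasks", [("experiment_id", 1), ("id", 1)], unique=True)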
+ """
+ index_name = "_".join(f"{field}_{order}" for field, order in index)
+ if index_name not in self._db[collection].index_information():
+ self._db[collection].create_index(index, unique=unique, name=index_name)
+
+ def start_session(self) -> ClientSession:
+ """Start a new client session."""
+ return self._db_client.start_session()
+
+ def clean_db(self) -> None:
+ """Clean the database."""
+ for collection in self._db.list_collection_names():
+ self._db[collection].drop()
diff --git a/eos/persistence/exceptions.py b/eos/persistence/exceptions.py
new file mode 100644
index 0000000..3b43bc6
--- /dev/null
+++ b/eos/persistence/exceptions.py
@@ -0,0 +1,2 @@
+class EosFileDbError(Exception):
+ pass
diff --git a/eos/persistence/file_db_manager.py b/eos/persistence/file_db_manager.py
new file mode 100644
index 0000000..f848aab
--- /dev/null
+++ b/eos/persistence/file_db_manager.py
@@ -0,0 +1,91 @@
+import io
+from collections.abc import AsyncIterable
+
+from minio import Minio
+from minio.error import S3Error
+
+from eos.logging.logger import log
+from eos.persistence.exceptions import EosFileDbError
+from eos.persistence.service_credentials import ServiceCredentials
+
+
+class FileDbManager:
+ """
+ Responsible for storing and retrieving files from a MinIO server.
+ """
+
+ def __init__(self, file_db_credentials: ServiceCredentials, bucket_name: str = "eos"):
+ endpoint = f"{file_db_credentials.host}:{file_db_credentials.port}"
+
+ self._client = Minio(
+ endpoint,
+ access_key=file_db_credentials.username,
+ secret_key=file_db_credentials.password,
+ secure=False,
+ )
+ self._bucket_name = bucket_name
+
+ if not self._client.bucket_exists(self._bucket_name):
+ self._client.make_bucket(self._bucket_name)
+
+ log.debug("File database manager initialized.")
+
+ def store_file(self, path: str, file_data: bytes) -> None:
+ """
+ Store a file at the specified path.
+ """
+ try:
+ self._client.put_object(self._bucket_name, path, io.BytesIO(file_data), len(file_data))
+ log.debug(f"File at path '{path}' uploaded successfully.")
+ except S3Error as e:
+ raise EosFileDbError(f"Error uploading file at path '{path}': {e!s}") from e
+
+ def delete_file(self, path: str) -> None:
+ """
+ Delete a file at the specified path.
+ """
+ try:
+ self._client.remove_object(self._bucket_name, path)
+ log.debug(f"File at path '{path}' deleted successfully.")
+ except S3Error as e:
+ raise EosFileDbError(f"Error deleting file at path '{path}': {e!s}") from e
+
+ def get_file(self, path: str) -> bytes:
+ """
+ Retrieve an entire file at the specified path.
+ """
+ response = None
+ try:
+ response = self._client.get_object(self._bucket_name, path)
+ return response.read()
+ except S3Error as e:
+ raise EosFileDbError(f"Error retrieving file at path '{path}': {e!s}") from e
+ finally:
+ if response:
+ response.close()
+ response.release_conn()
+
+ async def stream_file(self, path: str, chunk_size: int = 3 * 1024 * 1024) -> AsyncIterable[bytes]:
+ """
+ Stream a file at the specified path. More memory efficient than get_file.
+ """
+ response = None
+ try:
+ response = self._client.get_object(self._bucket_name, path)
+ while True:
+ data = response.read(chunk_size)
+ if not data:
+ break
+ yield data
+ except S3Error as e:
+ raise EosFileDbError(f"Error streaming file at path '{path}': {e!s}") from e
+ finally:
+ if response:
+ response.close()
+ response.release_conn()
+
+ def list_files(self, prefix: str = "") -> list[str]:
+ """
+ List files with the specified prefix.
+ """
+ objects = self._client.list_objects(self._bucket_name, prefix=prefix, recursive=True)
+ return [obj.object_name for obj in objects]
diff --git a/eos/persistence/mongo_repository.py b/eos/persistence/mongo_repository.py
new file mode 100644
index 0000000..e2b0bcc
--- /dev/null
+++ b/eos/persistence/mongo_repository.py
@@ -0,0 +1,91 @@
+from typing import Any
+
+from pymongo.results import DeleteResult, UpdateResult, InsertOneResult
+
+from eos.persistence.abstract_repository import AbstractRepository
+from eos.persistence.db_manager import DbManager
+
+
+class MongoRepository(AbstractRepository):
+ """
+ Provides CRUD operations for a MongoDB collection.
+ """
+
+ def __init__(self, collection_name: str, db_manager: DbManager):
+ self._collection = db_manager.get_db().get_collection(collection_name)
+
+ def create_indices(self, indices: list[tuple[str, int]], unique: bool = False) -> None:
+ """
+ Create indices on the collection.
+
+ :param indices: List of tuples of field names and order (1 for ascending, -1 for descending).
+ :param unique: Whether the index should be unique.
+ """
+ index_name = "_".join(f"{field}_{order}" for field, order in indices)
+ if index_name not in self._collection.index_information():
+ self._collection.create_index(indices, unique=unique, name=index_name)
+
+ def create(self, entity: dict[str, Any]) -> InsertOneResult:
+ """
+ Create a new entity in the collection.
+
+ :param entity: The entity to create.
+ :return: The result of the insert operation.
+ """
+ return self._collection.insert_one(entity)
+
+ def count(self, **kwargs) -> int:
+ """
+ Count the number of entities that match the query in the collection.
+
+ :param kwargs: Query parameters.
+ :return: The number of entities.
+ """
+ return self._collection.count_documents(kwargs)
+
+ def exists(self, count: int = 1, **kwargs) -> bool:
+ """
+ Check whether at least `count` entities matching the query exist in the collection.
+
+ :param count: The minimum number of matching entities required.
+ :param kwargs: Query parameters.
+ :return: Whether at least `count` matching entities exist.
+ """
+ return self.count(**kwargs) >= count
+
+ def get_one(self, **kwargs) -> dict[str, Any]:
+ """
+ Get a single entity from the collection.
+
+ :param kwargs: Query parameters.
+ :return: The entity as a dictionary.
+ """
+ return self._collection.find_one(kwargs)
+
+ def get_all(self, **kwargs) -> list[dict[str, Any]]:
+ """
+ Get all entities from the collection.
+
+ :param kwargs: Query parameters.
+ :return: List of entities as dictionaries.
+ """
+ return list(self._collection.find(kwargs))
+
+ def update(self, entity: dict[str, Any], **kwargs) -> UpdateResult:
+ """
+ Update an entity in the collection, inserting it if it does not exist (upsert).
+
+ :param entity: The updated entity (or some of its fields).
+ :param kwargs: Query parameters.
+ :return: The result of the update operation.
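+
+ Example (illustrative): update({"status": "RUNNING"}, id="task_1") matches on 'id' and sets 'status'.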
+ """
+ return self._collection.update_one(kwargs, {"$set": entity}, upsert=True)
+
+ def delete(self, **kwargs) -> DeleteResult:
+ """
+ Delete entities from the collection.
+
+ :param kwargs: Query parameters.
+ :return: The result of the delete operation.
+ """
+ return self._collection.delete_many(kwargs)
diff --git a/eos/persistence/service_credentials.py b/eos/persistence/service_credentials.py
new file mode 100644
index 0000000..97c0687
--- /dev/null
+++ b/eos/persistence/service_credentials.py
@@ -0,0 +1,9 @@
+from dataclasses import dataclass
+
+
+@dataclass
+class ServiceCredentials:
+ host: str
+ port: int
+ username: str
+ password: str
diff --git a/eos/resource_allocation/__init__.py b/eos/resource_allocation/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/eos/resource_allocation/container_allocation_manager.py b/eos/resource_allocation/container_allocation_manager.py
new file mode 100644
index 0000000..fd0645a
--- /dev/null
+++ b/eos/resource_allocation/container_allocation_manager.py
@@ -0,0 +1,121 @@
+from typing import Any
+
+from eos.configuration.configuration_manager import ConfigurationManager
+from eos.logging.logger import log
+from eos.persistence.db_manager import DbManager
+from eos.persistence.mongo_repository import MongoRepository
+from eos.resource_allocation.entities.container_allocation import (
+ ContainerAllocation,
+)
+from eos.resource_allocation.exceptions import (
+ EosContainerAllocatedError,
+ EosContainerNotFoundError,
+)
+
+
+class ContainerAllocationManager:
+ """
+ Responsible for allocating containers to "owners".
+ An owner may be an experiment task, a human, etc. A container can only be held by one owner at a time.
+ """
+
+ def __init__(
+ self,
+ configuration_manager: ConfigurationManager,
+ db_manager: DbManager,
+ ):
+ self._configuration_manager = configuration_manager
+ self._allocations = MongoRepository("container_allocations", db_manager)
+ self._allocations.create_indices([("id", 1)], unique=True)
+
+ log.debug("Container allocator initialized.")
+
+ def allocate(self, container_id: str, owner: str, experiment_id: str | None = None) -> None:
+ """
+ Allocate a container to an owner.
+ """
+ if self.is_allocated(container_id):
+ raise EosContainerAllocatedError(f"Container '{container_id}' is already allocated.")
+
+ container_config = self._get_container_config(container_id)
+ allocation = ContainerAllocation(
+ id=container_id,
+ owner=owner,
+ container_type=container_config["type"],
+ lab=container_config["lab"],
+ experiment_id=experiment_id,
+ )
+ self._allocations.create(allocation.model_dump())
+
+ def deallocate(self, container_id: str) -> None:
+ """
+ Deallocate a container.
+ """
+ result = self._allocations.delete(id=container_id)
+ if result.deleted_count == 0:
+ log.warning(f"Container '{container_id}' is not allocated. No action taken.")
+ else:
+ log.debug(f"Deallocated container '{container_id}'.")
+
+ def is_allocated(self, container_id: str) -> bool:
+ """
+ Check if a container is allocated.
+ """
+ self._get_container_config(container_id)
+ return self._allocations.get_one(id=container_id) is not None
+
+ def get_allocation(self, container_id: str) -> ContainerAllocation | None:
+ """
+ Get the allocation details of a container.
+ """
+ self._get_container_config(container_id)
+ allocation = self._allocations.get_one(id=container_id)
+ return ContainerAllocation(**allocation) if allocation else None
+
+ def get_allocations(self, **query: dict[str, Any]) -> list[ContainerAllocation]:
+ """
+ Query allocations with arbitrary parameters.
+ """
+ allocations = self._allocations.get_all(**query)
+ return [ContainerAllocation(**allocation) for allocation in allocations]
+
+ def get_all_unallocated(self) -> list[str]:
+ """
+ Get all unallocated containers.
+ """
+ allocated_containers = [allocation.id for allocation in self.get_allocations()]
+ all_containers = [
+ container_id
+ for lab_config in self._configuration_manager.labs.values()
+ for container_config in lab_config.containers
+ for container_id in container_config.ids
+ ]
+ return list(set(all_containers) - set(allocated_containers))
+
+ def deallocate_all(self) -> None:
+ """
+ Deallocate all containers.
+ """
+ result = self._allocations.delete()
+ log.debug(f"Deallocated all {result.deleted_count} containers.")
+
+ def deallocate_all_by_owner(self, owner: str) -> None:
+ """
+ Deallocate all containers allocated to an owner.
+ """
+ result = self._allocations.delete(owner=owner)
+ if result.deleted_count == 0:
+ log.warning(f"Owner '{owner}' has no containers allocated. No action taken.")
+ else:
+ log.debug(f"Deallocated {result.deleted_count} containers for owner '{owner}'.")
+
+ def _get_container_config(self, container_id: str) -> dict:
+ for lab_config in self._configuration_manager.labs.values():
+ for container_config in lab_config.containers:
+ if container_id in container_config.ids:
+ return {
+ "type": container_config.type,
+ "lab": lab_config.type,
+ }
+
+ raise EosContainerNotFoundError(f"Container '{container_id}' not found in the configuration.")
diff --git a/eos/resource_allocation/device_allocation_manager.py b/eos/resource_allocation/device_allocation_manager.py
new file mode 100644
index 0000000..3e6c75c
--- /dev/null
+++ b/eos/resource_allocation/device_allocation_manager.py
@@ -0,0 +1,118 @@
+from typing import Any
+
+from eos.configuration.configuration_manager import ConfigurationManager
+from eos.logging.logger import log
+from eos.persistence.db_manager import DbManager
+from eos.persistence.mongo_repository import MongoRepository
+from eos.resource_allocation.entities.device_allocation import (
+ DeviceAllocation,
+)
+from eos.resource_allocation.exceptions import (
+ EosDeviceAllocatedError,
+ EosDeviceNotFoundError,
+)
+
+
+class DeviceAllocationManager:
+ """
+ Responsible for allocating devices to "owners".
+ An owner may be an experiment task, a human, etc. A device can only be held by one owner at a time.
+ """
+
+ def __init__(
+ self,
+ configuration_manager: ConfigurationManager,
+ db_manager: DbManager,
+ ):
+ self._configuration_manager = configuration_manager
+ self._allocations = MongoRepository("device_allocations", db_manager)
+ self._allocations.create_indices([("lab_id", 1), ("id", 1)], unique=True)
+
+ log.debug("Device allocator initialized.")
+
+ def allocate(self, lab_id: str, device_id: str, owner: str, experiment_id: str | None = None) -> None:
+ """
+ Allocate a device to an owner.
+ """
+ if self.is_allocated(lab_id, device_id):
+ raise EosDeviceAllocatedError(f"Device '{device_id}' in lab '{lab_id}' is already allocated.")
+
+ device_config = self._get_device_config(lab_id, device_id)
+ allocation = DeviceAllocation(
+ id=device_id,
+ lab_id=device_config["lab_id"],
+ owner=owner,
+ device_type=device_config["type"],
+ experiment_id=experiment_id,
+ )
+ self._allocations.create(allocation.model_dump())
+
+ def deallocate(self, lab_id: str, device_id: str) -> None:
+ """
+ Deallocate a device.
+ """
+ result = self._allocations.delete(lab_id=lab_id, id=device_id)
+ if result.deleted_count == 0:
+ log.warning(f"Device '{device_id}' in lab '{lab_id}' is not allocated. No action taken.")
+ else:
+ log.debug(f"Deallocated device '{device_id}' in lab '{lab_id}'.")
+
+ def is_allocated(self, lab_id: str, device_id: str) -> bool:
+ """
+ Check if a device is allocated.
+ """
+ self._get_device_config(lab_id, device_id)
+ return self._allocations.get_one(lab_id=lab_id, id=device_id) is not None
+
+ def get_allocation(self, lab_id: str, device_id: str) -> DeviceAllocation | None:
+ """
+ Get the allocation details of a device.
+ """
+ self._get_device_config(lab_id, device_id)
+ allocation = self._allocations.get_one(lab_id=lab_id, id=device_id)
+ return DeviceAllocation(**allocation) if allocation else None
+
+ def get_allocations(self, **query: dict[str, Any]) -> list[DeviceAllocation]:
+ """
+ Query device allocations with arbitrary parameters.
+ """
+ allocations = self._allocations.get_all(**query)
+ return [DeviceAllocation(**allocation) for allocation in allocations]
+
+ def get_all_unallocated(self) -> list[str]:
+ """
+ Get all unallocated devices.
+ """
+ allocated_devices = [allocation.id for allocation in self.get_allocations()]
+ all_devices = [
+ device_id for lab_config in self._configuration_manager.labs.values() for device_id in lab_config.devices
+ ]
+ return list(set(all_devices) - set(allocated_devices))
+
+ def deallocate_all_by_owner(self, owner: str) -> None:
+ """
+ Deallocate all devices allocated to an owner.
+ """
+ result = self._allocations.delete(owner=owner)
+ if result.deleted_count == 0:
+ log.warning(f"Owner '{owner}' has no devices allocated. No action taken.")
+ else:
+ log.debug(f"Deallocated {result.deleted_count} devices for owner '{owner}'.")
+
+ def deallocate_all(self) -> None:
+ """
+ Deallocate all devices.
+ """
+ result = self._allocations.delete()
+ log.debug(f"Deallocated all {result.deleted_count} devices.")
+
+ def _get_device_config(self, lab_id: str, device_id: str) -> dict[str, Any]:
+ lab = self._configuration_manager.labs.get(lab_id)
+ if lab:
+ for dev_id, device_config in lab.devices.items():
+ if dev_id == device_id:
+ return {
+ "lab_id": lab.type,
+ "type": device_config.type,
+ }
+
+ raise EosDeviceNotFoundError(f"Device '{device_id}' in lab '{lab_id}' not found in the configuration.")
diff --git a/eos/resource_allocation/entities/__init__.py b/eos/resource_allocation/entities/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/eos/resource_allocation/entities/container_allocation.py b/eos/resource_allocation/entities/container_allocation.py
new file mode 100644
index 0000000..91e90f3
--- /dev/null
+++ b/eos/resource_allocation/entities/container_allocation.py
@@ -0,0 +1,7 @@
+from eos.resource_allocation.entities.resource_allocation import (
+ ResourceAllocation,
+)
+
+
+class ContainerAllocation(ResourceAllocation):
+ container_type: str
diff --git a/eos/resource_allocation/entities/device_allocation.py b/eos/resource_allocation/entities/device_allocation.py
new file mode 100644
index 0000000..fab32ca
--- /dev/null
+++ b/eos/resource_allocation/entities/device_allocation.py
@@ -0,0 +1,8 @@
+from eos.resource_allocation.entities.resource_allocation import (
+ ResourceAllocation,
+)
+
+
+class DeviceAllocation(ResourceAllocation):
+ lab_id: str
+ device_type: str
diff --git a/eos/resource_allocation/entities/resource_allocation.py b/eos/resource_allocation/entities/resource_allocation.py
new file mode 100644
index 0000000..3c32f98
--- /dev/null
+++ b/eos/resource_allocation/entities/resource_allocation.py
@@ -0,0 +1,14 @@
+from datetime import datetime, timezone
+
+from pydantic import BaseModel, Field
+
+
+class ResourceAllocation(BaseModel):
+ id: str
+ owner: str
+ experiment_id: str | None = None
+ start_time: datetime | None = None
+ created_at: datetime = Field(default_factory=lambda: datetime.now(tz=timezone.utc))
+
+ class Config:
+ arbitrary_types_allowed = True
diff --git a/eos/resource_allocation/entities/resource_request.py b/eos/resource_allocation/entities/resource_request.py
new file mode 100644
index 0000000..b29e823
--- /dev/null
+++ b/eos/resource_allocation/entities/resource_request.py
@@ -0,0 +1,61 @@
+from datetime import datetime, timezone
+from enum import Enum
+
+from bson import ObjectId
+from pydantic import BaseModel, field_serializer, Field
+
+
+class ResourceType(Enum):
+ CONTAINER = "CONTAINER"
+ DEVICE = "DEVICE"
+
+
+class Resource(BaseModel):
+ id: str
+ lab_id: str
+ resource_type: ResourceType
+
+ @field_serializer("resource_type")
+ def resource_type_enum_to_string(self, v: ResourceType) -> str:
+ return v.value
+
+
+class ResourceAllocationRequest(BaseModel):
+ requester: str
+ resources: list[Resource] = []
+ experiment_id: str | None = None
+ reason: str | None = None
+ priority: int = Field(default=100, gt=0)
+
+ def add_resource(self, resource_id: str, lab_id: str, resource_type: ResourceType) -> None:
+ self.resources.append(Resource(id=resource_id, lab_id=lab_id, resource_type=resource_type))
+
+ def remove_resource(self, resource_id: str, lab_id: str, resource_type: ResourceType) -> None:
+ self.resources = [
+ r
+ for r in self.resources
+ if not (r.id == resource_id and r.lab_id == lab_id and r.resource_type == resource_type)
+ ]
+
+
+class ResourceRequestAllocationStatus(Enum):
+ PENDING = "PENDING"
+ ALLOCATED = "ALLOCATED"
+ COMPLETED = "COMPLETED"
+ ABORTED = "ABORTED"
+
+
+class ActiveResourceAllocationRequest(BaseModel):
+ id: ObjectId = Field(default_factory=ObjectId, alias="_id")
+ request: ResourceAllocationRequest
+ status: ResourceRequestAllocationStatus = ResourceRequestAllocationStatus.PENDING
+ created_at: datetime = Field(default_factory=lambda: datetime.now(tz=timezone.utc))
+ allocated_at: datetime | None = None
+
+ class Config:
+ arbitrary_types_allowed = True
+ populate_by_name = True
+
+ @field_serializer("status")
+ def status_enum_to_string(self, v: ResourceRequestAllocationStatus) -> str:
+ return v.value
diff --git a/eos/resource_allocation/exceptions.py b/eos/resource_allocation/exceptions.py
new file mode 100644
index 0000000..551a807
--- /dev/null
+++ b/eos/resource_allocation/exceptions.py
@@ -0,0 +1,18 @@
+class EosResourceRequestError(Exception):
+ pass
+
+
+class EosDeviceAllocatedError(EosResourceRequestError):
+ pass
+
+
+class EosDeviceNotFoundError(EosResourceRequestError):
+ pass
+
+
+class EosContainerAllocatedError(EosResourceRequestError):
+ pass
+
+
+class EosContainerNotFoundError(EosResourceRequestError):
+ pass
diff --git a/eos/resource_allocation/repositories/__init__.py b/eos/resource_allocation/repositories/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/eos/resource_allocation/repositories/resource_request_repository.py b/eos/resource_allocation/repositories/resource_request_repository.py
new file mode 100644
index 0000000..9998349
--- /dev/null
+++ b/eos/resource_allocation/repositories/resource_request_repository.py
@@ -0,0 +1,36 @@
+from eos.persistence.mongo_repository import MongoRepository
+from eos.resource_allocation.entities.resource_request import (
+ ResourceAllocationRequest,
+ ResourceRequestAllocationStatus,
+)
+
+
+class ResourceRequestRepository(MongoRepository):
+ def get_requests_prioritized(self, status: ResourceRequestAllocationStatus) -> list[dict]:
+ return list(self._collection.find({"status": status.value}).sort("request.priority", 1))
+
+ def get_existing_request(self, request: ResourceAllocationRequest) -> dict:
+ query = {
+ "request.resources": [r.model_dump() for r in request.resources],
+ "request.requester": request.requester,
+ "status": {
+ "$in": [
+ ResourceRequestAllocationStatus.PENDING.value,
+ ResourceRequestAllocationStatus.ALLOCATED.value,
+ ]
+ },
+ }
+
+ return self._collection.find_one(query)
+
+ def clean_requests(self) -> None:
+ self._collection.delete_many(
+ {
+ "status": {
+ "$in": [
+ ResourceRequestAllocationStatus.COMPLETED.value,
+ ResourceRequestAllocationStatus.ABORTED.value,
+ ]
+ }
+ }
+ )
diff --git a/eos/resource_allocation/resource_allocation_manager.py b/eos/resource_allocation/resource_allocation_manager.py
new file mode 100644
index 0000000..882c7ad
--- /dev/null
+++ b/eos/resource_allocation/resource_allocation_manager.py
@@ -0,0 +1,261 @@
+from collections.abc import Callable
+from datetime import datetime, timezone
+from threading import Lock
+
+from bson import ObjectId
+
+from eos.configuration.configuration_manager import ConfigurationManager
+from eos.logging.logger import log
+from eos.persistence.db_manager import DbManager
+from eos.resource_allocation.container_allocation_manager import ContainerAllocationManager
+from eos.resource_allocation.device_allocation_manager import DeviceAllocationManager
+from eos.resource_allocation.entities.resource_request import (
+ ResourceAllocationRequest,
+ ActiveResourceAllocationRequest,
+ ResourceRequestAllocationStatus,
+ ResourceType,
+)
+from eos.resource_allocation.exceptions import EosResourceRequestError
+from eos.resource_allocation.repositories.resource_request_repository import (
+ ResourceRequestRepository,
+)
+
+
+class ResourceAllocationManager:
+ """
+ Provides facilities to request allocation of resources.
+ """
+
+ def __init__(
+ self,
+ configuration_manager: ConfigurationManager,
+ db_manager: DbManager,
+ ):
+ self._device_allocation_manager = DeviceAllocationManager(configuration_manager, db_manager)
+ self._container_allocation_manager = ContainerAllocationManager(configuration_manager, db_manager)
+ self._active_requests = ResourceRequestRepository("resource_requests", db_manager)
+
+ # Callbacks for when resource allocation requests are processed
+ self._request_callbacks: dict[ObjectId, Callable[[ActiveResourceAllocationRequest], None]] = {}
+
+ self._lock = Lock()
+
+ self._delete_all_requests()
+ self._delete_all_allocations()
+
+ log.debug("Resource allocation manager initialized.")
+
+ def request_resources(
+ self,
+ request: ResourceAllocationRequest,
+ callback: Callable[[ActiveResourceAllocationRequest], None],
+ ) -> ActiveResourceAllocationRequest:
+ """
+ Request allocation of resources. A callback function is called when the resource allocation requests are
+ processed. If a resource allocation request already exists, the existing request is used instead of creating
+ a new one.
+
+ :param request: The resource allocation request.
+ :param callback: Callback function to be called when the resource allocation request is processed.
+ :return: The active resource allocation request (existing or newly created).
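+
+ Example (illustrative sketch; resource and requester names are hypothetical):
+ >>> request = ResourceAllocationRequest(requester="task_1", experiment_id="exp_1", priority=1)
+ >>> request.add_resource("magnetic_mixer", "small_lab", ResourceType.DEVICE)
+ >>> active = manager.request_resources(request, lambda req: print(req.status))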
+ """
+ with self._lock:
+ existing_request = self._find_existing_request(request)
+ if existing_request:
+ if existing_request.status in [
+ ResourceRequestAllocationStatus.PENDING,
+ ResourceRequestAllocationStatus.ALLOCATED,
+ ]:
+ self._request_callbacks[existing_request.id] = callback
+ return existing_request
+
+ active_request = ActiveResourceAllocationRequest(request=request)
+ result = self._active_requests.create(active_request.model_dump(by_alias=True))
+ active_request.id = result.inserted_id
+ self._request_callbacks[active_request.id] = callback
+ return active_request
+
+ def release_resources(self, active_request: ActiveResourceAllocationRequest) -> None:
+ """
+ Release the resources allocated for an active resource allocation request.
+
+ :param active_request: The active resource allocation request.
+ """
+ with self._lock:
+ for resource in active_request.request.resources:
+ if resource.resource_type == ResourceType.DEVICE:
+ self._device_allocation_manager.deallocate(resource.lab_id, resource.id)
+ elif resource.resource_type == ResourceType.CONTAINER:
+ self._container_allocation_manager.deallocate(resource.id)
+ else:
+ raise EosResourceRequestError(f"Unknown resource type: {resource.resource_type}")
+
+ self._update_request_status(active_request.id, ResourceRequestAllocationStatus.COMPLETED)
+
+ def process_active_requests(self) -> None:
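+ """
+ Process all pending resource allocation requests in priority order, attempting to allocate
+ resources for each and invoking the registered callback for every request that gets allocated.
+ Completed and aborted requests are cleaned up first.
+ """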
+ with self._lock:
+ self._clean_completed_and_aborted_requests()
+
+ active_requests = self._get_all_active_requests_prioritized()
+
+ for active_request in active_requests:
+ if active_request.status != ResourceRequestAllocationStatus.PENDING:
+ continue
+
+ allocation_success = self._try_allocate(active_request)
+
+ if allocation_success:
+ self._invoke_request_callback(active_request)
+
+ def abort_active_request(self, request_id: ObjectId) -> None:
+ """
+ Abort an active resource allocation request.
+ """
+ with self._lock:
+ request = self.get_active_request(request_id)
+ if request is None:
+ return
+ for resource in request.request.resources:
+ if resource.resource_type == ResourceType.DEVICE:
+ self._device_allocation_manager.deallocate(resource.lab_id, resource.id)
+ elif resource.resource_type == ResourceType.CONTAINER:
+ self._container_allocation_manager.deallocate(resource.id)
+ self._update_request_status(request_id, ResourceRequestAllocationStatus.ABORTED)
+ active_request = self.get_active_request(request_id)
+ self._invoke_request_callback(active_request)
+
+ def _get_all_active_requests_prioritized(self) -> list[ActiveResourceAllocationRequest]:
+ """
+ Get all active resource allocation requests prioritized by the request priority in ascending order.
+ """
+ active_requests = []
+ active_requests_count = self._active_requests.count(status=ResourceRequestAllocationStatus.PENDING.value)
+
+ if active_requests_count > 0:
+ active_requests = self._active_requests.get_requests_prioritized(ResourceRequestAllocationStatus.PENDING)
+
+ return [ActiveResourceAllocationRequest(**request) for request in active_requests]
+
+ def get_all_active_requests(
+ self,
+ requester: str | None = None,
+ lab_id: str | None = None,
+ experiment_id: str | None = None,
+ status: ResourceRequestAllocationStatus | None = None,
+ ) -> list[ActiveResourceAllocationRequest]:
+ """
+ Get all active resource allocation requests.
+
+ :param requester: Filter by the requester.
+ :param lab_id: Filter by the lab ID.
+ :param experiment_id: Filter by the experiment ID.
+ :param status: Filter by the status.
+ """
+ query = {}
+ if requester:
+ query["request.requester"] = requester
+ if lab_id:
+ query["request.lab_id"] = lab_id
+ if experiment_id:
+ query["request.experiment_id"] = experiment_id
+ if status:
+ query["status"] = status.value
+ active_requests = self._active_requests.get_all(**query)
+ return [ActiveResourceAllocationRequest(**request) for request in active_requests]
+
+ def get_active_request(self, request_id: ObjectId) -> ActiveResourceAllocationRequest | None:
+ """
+ Get an active resource allocation request by ID. If the request does not exist, returns None.
+ """
+ request = self._active_requests.get_one(_id=request_id)
+ return ActiveResourceAllocationRequest(**request) if request else None
+
+ @property
+ def device_allocation_manager(self) -> DeviceAllocationManager:
+ return self._device_allocation_manager
+
+ @property
+ def container_allocation_manager(self) -> ContainerAllocationManager:
+ return self._container_allocation_manager
+
+ def _update_request_status(self, request_id: ObjectId, status: ResourceRequestAllocationStatus) -> None:
+ """
+ Update the status of an active resource allocation request.
+ """
+ update_data = {"status": status.value}
+ if status == ResourceRequestAllocationStatus.ALLOCATED:
+ update_data["allocated_at"] = datetime.now(tz=timezone.utc)
+
+ self._active_requests.update(update_data, _id=request_id)
+
+ def _find_existing_request(self, request: ResourceAllocationRequest) -> ActiveResourceAllocationRequest | None:
+ """
+ Find an existing active resource allocation request that matches the given request.
+ """
+ existing_request = self._active_requests.get_existing_request(request)
+ return ActiveResourceAllocationRequest(**existing_request) if existing_request else None
+
+ def _invoke_request_callback(self, active_request: ActiveResourceAllocationRequest) -> None:
+ """
+ Invoke the allocation callback for an active resource allocation request.
+ """
+ callback = self._request_callbacks.pop(active_request.id, None)
+ if callback:
+ callback(active_request)
+
+ def _try_allocate(self, active_request: ActiveResourceAllocationRequest) -> bool:
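+ """
+ Attempt to allocate all resources in a request atomically: resources are allocated only if
+ every requested device and container is currently unallocated ("all or nothing").
+ Returns True if the request was allocated, False otherwise.
+ """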
+ temp_allocations = []
+ all_available = True
+
+ for resource in active_request.request.resources:
+ if resource.resource_type == ResourceType.DEVICE:
+ if not self._device_allocation_manager.is_allocated(resource.lab_id, resource.id):
+ temp_allocations.append(("device", resource.lab_id, resource.id))
+ else:
+ all_available = False
+ break
+ elif resource.resource_type == ResourceType.CONTAINER:
+ if not self._container_allocation_manager.is_allocated(resource.id):
+ temp_allocations.append(("container", resource.id))
+ else:
+ all_available = False
+ break
+ else:
+ raise EosResourceRequestError(f"Unknown resource type: {resource.resource_type}")
+
+ if all_available:
+ for allocation in temp_allocations:
+ if allocation[0] == "device":
+ self._device_allocation_manager.allocate(
+ allocation[1],
+ allocation[2],
+ active_request.request.requester,
+ experiment_id=active_request.request.experiment_id,
+ )
+ else: # container
+ self._container_allocation_manager.allocate(
+ allocation[1],
+ active_request.request.requester,
+ experiment_id=active_request.request.experiment_id,
+ )
+
+ self._update_request_status(active_request.id, ResourceRequestAllocationStatus.ALLOCATED)
+ active_request.status = ResourceRequestAllocationStatus.ALLOCATED
+ return True
+
+ return False
+
+ def _clean_completed_and_aborted_requests(self) -> None:
+ """
+ Remove completed or aborted active resource allocation requests.
+ """
+ self._active_requests.clean_requests()
+
+ def _delete_all_requests(self) -> None:
+ """
+ Delete all active resource allocation requests.
+ """
+ self._active_requests.delete()
+
+ def _delete_all_allocations(self) -> None:
+ """
+ Delete all device and container allocations.
+ """
+ self._device_allocation_manager.deallocate_all()
+ self._container_allocation_manager.deallocate_all()
diff --git a/eos/scheduling/__init__.py b/eos/scheduling/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/eos/scheduling/abstract_scheduler.py b/eos/scheduling/abstract_scheduler.py
new file mode 100644
index 0000000..b253140
--- /dev/null
+++ b/eos/scheduling/abstract_scheduler.py
@@ -0,0 +1,42 @@
+from abc import ABC, abstractmethod
+
+from eos.configuration.experiment_graph.experiment_graph import ExperimentGraph
+from eos.scheduling.entities.scheduled_task import ScheduledTask
+
+
+class AbstractScheduler(ABC):
+ @abstractmethod
+ def register_experiment(self, experiment_id: str, experiment_type: str, experiment_graph: ExperimentGraph) -> None:
+ """
+ Register an experiment with the scheduler.
+
+ :param experiment_id: The ID of the experiment.
+ :param experiment_type: The type of the experiment.
+ :param experiment_graph: The task graph of the experiment's task sequence.
+ """
+
+ @abstractmethod
+ def unregister_experiment(self, experiment_id: str) -> None:
+ """
+ Unregister an experiment from the scheduler.
+
+ :param experiment_id: The ID of the experiment.
+ """
+
+ @abstractmethod
+ async def request_tasks(self, experiment_id: str) -> list[ScheduledTask]:
+ """
+ Request the next tasks to be executed for a specific experiment.
+
+ :param experiment_id: The ID of the experiment.
+ :return: A list of tasks to be executed next. Returns an empty list if no new tasks are available.
+ """
+
+ @abstractmethod
+ def is_experiment_completed(self, experiment_id: str) -> bool:
+ """
+ Check if an experiment has been completed.
+
+ :param experiment_id: The ID of the experiment.
+ :return: True if the experiment has been completed, False otherwise.
+ """
diff --git a/eos/scheduling/basic_scheduler.py b/eos/scheduling/basic_scheduler.py
new file mode 100644
index 0000000..030232e
--- /dev/null
+++ b/eos/scheduling/basic_scheduler.py
@@ -0,0 +1,280 @@
+import asyncio
+import threading
+
+from eos.configuration.configuration_manager import ConfigurationManager
+from eos.configuration.entities.task import TaskDeviceConfig, TaskConfig
+from eos.configuration.experiment_graph.experiment_graph import ExperimentGraph
+from eos.devices.device_manager import DeviceManager
+from eos.devices.entities.device import DeviceStatus
+from eos.experiments.experiment_manager import ExperimentManager
+from eos.logging.logger import log
+from eos.resource_allocation.entities.resource_request import (
+ ActiveResourceAllocationRequest,
+ ResourceAllocationRequest,
+ ResourceType,
+ ResourceRequestAllocationStatus,
+)
+from eos.resource_allocation.exceptions import EosResourceRequestError
+from eos.resource_allocation.resource_allocation_manager import ResourceAllocationManager
+from eos.scheduling.abstract_scheduler import AbstractScheduler
+from eos.scheduling.entities.scheduled_task import ScheduledTask
+from eos.scheduling.exceptions import EosSchedulerRegistrationError, EosSchedulerResourceAllocationError
+from eos.tasks.task_input_resolver import TaskInputResolver
+from eos.tasks.task_manager import TaskManager
+
+
+class BasicScheduler(AbstractScheduler):
+ """
+ The basic scheduler is responsible for scheduling experimental tasks based on their precedence constraints.
+ The task graph is a DAG. Each task in the experiment has dependencies on other tasks, as well as on a device in
+ the lab. Each device can only be used by one task at a time. In addition, each task may depend on certain
+ containers and on output parameters generated by previous tasks. The scheduler schedules tasks based on their
+ dependencies, device availability, and container and output parameter availability.
+
+ The scheduler should be able to schedule tasks across multiple experiments simultaneously and dynamically add and
+ remove experiments from task scheduling consideration.
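+
+ Example (illustrative; the experiment type is hypothetical):
+ >>> scheduler.register_experiment("exp_1", "water_purification", experiment_graph)
+ >>> tasks = await scheduler.request_tasks("exp_1")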
+ """
+
+ def __init__(
+ self,
+ configuration_manager: ConfigurationManager,
+ experiment_manager: ExperimentManager,
+ task_manager: TaskManager,
+ device_manager: DeviceManager,
+ resource_allocation_manager: ResourceAllocationManager,
+ ):
+ self._configuration_manager = configuration_manager
+ self._experiment_manager = experiment_manager
+ self._task_input_resolver = TaskInputResolver(task_manager, experiment_manager)
+ self._device_manager = device_manager
+
+ self._resource_allocation_manager = resource_allocation_manager
+ self._device_allocation_manager = self._resource_allocation_manager.device_allocation_manager
+ self._container_allocation_manager = self._resource_allocation_manager.container_allocation_manager
+
+ self._registered_experiments = {}
+ self._allocated_resources: dict[str, dict[str, ActiveResourceAllocationRequest]] = {}
+ self._lock = threading.Lock()
+
+ log.debug("Basic scheduler initialized.")
+
+ def register_experiment(self, experiment_id: str, experiment_type: str, experiment_graph: ExperimentGraph) -> None:
+ """
+ Register an experiment for execution. The scheduler will also consider this experiment when tasks are requested.
+ The scheduler records the experiment's ID, type, and task graph.
+ """
+ with self._lock:
+ if experiment_type not in self._configuration_manager.experiments:
+ raise EosSchedulerRegistrationError(
+ f"Cannot register an experiment with the scheduler. Experiment '{experiment_type}' does not exist."
+ )
+ self._registered_experiments[experiment_id] = (experiment_type, experiment_graph)
+ log.debug("Experiment '%s' registered for scheduling.", experiment_id)
+
+ def unregister_experiment(self, experiment_id: str) -> None:
+ """
+ Unregister an experiment from the scheduler. The scheduler will no longer consider this experiment when tasks
+ are requested.
+ """
+ with self._lock:
+ if experiment_id in self._registered_experiments:
+ del self._registered_experiments[experiment_id]
+ self._release_experiment_resources(experiment_id)
+ else:
+ raise EosSchedulerRegistrationError(
+ f"Cannot unregister experiment {experiment_id} from the scheduler as it is not registered."
+ )
+
+ async def request_tasks(self, experiment_id: str) -> list[ScheduledTask]:
+ """
+ Request the next tasks to be executed for a specific experiment. Resources such as devices are
+ allocated for the tasks. The scheduler will only consider tasks that have all their dependencies met and have
+ available resources.
+
+ :param experiment_id: The ID of the experiment for which to request tasks.
+ :return: A list of tasks that are ready to be executed.
+ """
+ with self._lock:
+ if experiment_id not in self._registered_experiments:
+ raise EosSchedulerRegistrationError(
+ f"Cannot request tasks from the scheduler for unregistered experiment {experiment_id}."
+ )
+ experiment_type, experiment_graph = self._registered_experiments[experiment_id]
+
+ all_tasks = experiment_graph.get_topologically_sorted_tasks()
+ completed_tasks = self._experiment_manager.get_completed_tasks(experiment_id)
+ pending_tasks = [task_id for task_id in all_tasks if task_id not in completed_tasks]
+
+ # Release resources for completed tasks
+ for task_id in completed_tasks:
+ if task_id in self._allocated_resources.get(experiment_id, {}):
+ self._release_task_resources(experiment_id, task_id)
+
+ scheduled_tasks = []
+ for task_id in pending_tasks:
+ if not self._check_task_dependencies_met(task_id, completed_tasks, experiment_graph):
+ continue
+
+ task_config = experiment_graph.get_task_config(task_id)
+ task_config = self._task_input_resolver.resolve_input_container_references(experiment_id, task_config)
+
+ if not all(self._check_device_available(device) for device in task_config.devices):
+ continue
+ if not all(
+ self._check_container_available(container_id) for container_id in task_config.containers.values()
+ ):
+ continue
+
+ try:
+ resource_request = self._create_resource_request(experiment_id, task_id, task_config)
+ allocated_resources = await self._request_resources(resource_request)
+ self._allocated_resources.setdefault(experiment_id, {})[task_id] = allocated_resources
+ scheduled_tasks.append(
+ ScheduledTask(
+ id=task_id,
+ experiment_id=experiment_id,
+ devices=[
+ TaskDeviceConfig(lab_id=device.lab_id, id=device.id) for device in task_config.devices
+ ],
+ allocated_resources=allocated_resources,
+ )
+ )
+ except EosSchedulerResourceAllocationError:
+ log.warning(
+ f"Timed out in allocating resources for task '{task_id}' in experiment '{experiment_id}. "
+ f"Will retry.'"
+ )
+ continue
+
+ return scheduled_tasks
+
+ def _create_resource_request(
+ self, experiment_id: str, task_id: str, task_config: TaskConfig
+ ) -> ResourceAllocationRequest:
+ """
+ Create a single resource allocation request for all devices and containers required by a task.
+ """
+ request = ResourceAllocationRequest(
+ requester=task_id,
+ experiment_id=experiment_id,
+ reason=f"Resources required for task '{task_id}'",
+ )
+
+ for device in task_config.devices:
+ request.add_resource(device.id, device.lab_id, ResourceType.DEVICE)
+
+ for container_id in task_config.containers.values():
+ request.add_resource(container_id, "", ResourceType.CONTAINER)
+
+ return request
+
+ async def _request_resources(
+ self, resource_request: ResourceAllocationRequest, timeout: int = 15
+ ) -> ActiveResourceAllocationRequest:
+ """
+ Request resources from the resource allocation manager for a single resource allocation request. This method
+ will block until all resources are allocated or until the timeout is reached. If the timeout is reached, the
+ resource allocation will be aborted and an error will be raised.
+
+ :param resource_request: A resource allocation request to be allocated.
+ :param timeout: The maximum time to wait for resource allocation in seconds.
+
+ :return: An active resource allocation request that has been allocated.
+ """
+ allocation_event = asyncio.Event()
+ active_request = None
+
+ def resource_request_callback(request: ActiveResourceAllocationRequest) -> None:
+ nonlocal active_request
+ active_request = request
+ allocation_event.set()
+
+ active_resource_request = self._resource_allocation_manager.request_resources(
+ resource_request, resource_request_callback
+ )
+
+ if active_resource_request.status == ResourceRequestAllocationStatus.ALLOCATED:
+ return active_resource_request
+
+ self._resource_allocation_manager.process_active_requests()
+
+ try:
+ await asyncio.wait_for(allocation_event.wait(), timeout)
+ except asyncio.TimeoutError as e:
+ self._resource_allocation_manager.abort_active_request(active_resource_request.id)
+ raise EosSchedulerResourceAllocationError(
+ f"Resource allocation timed out after {timeout} seconds for task '{resource_request.requester}' "
+ f"while trying to schedule it. "
+ f"Aborting resource allocation for this task. Will retry again."
+ ) from e
+
+ if not active_request:
+ raise EosSchedulerResourceAllocationError(
+ f"Failed to allocate resources for task '{resource_request.requester}'."
+ )
+
+ return active_request
+
+ def _release_task_resources(self, experiment_id: str, task_id: str) -> None:
+ active_request = self._allocated_resources[experiment_id].pop(task_id, None)
+ if active_request:
+ try:
+ self._resource_allocation_manager.release_resources(active_request)
+ self._resource_allocation_manager.process_active_requests()
+ except EosResourceRequestError as e:
+ log.error(f"Error releasing resources for task '{task_id}' in experiment '{experiment_id}': {e}")
+
+ def _release_experiment_resources(self, experiment_id: str) -> None:
+ task_ids = list(self._allocated_resources.get(experiment_id, {}).keys())
+ for task_id in task_ids:
+ self._release_task_resources(experiment_id, task_id)
+
+ if experiment_id in self._allocated_resources:
+ del self._allocated_resources[experiment_id]
+
+ @staticmethod
+ def _check_task_dependencies_met(
+ task_id: str, completed_tasks: set[str], experiment_graph: ExperimentGraph
+ ) -> bool:
+ """
+ Return True if all dependencies of a task have been completed, False otherwise.
+ """
+ dependencies = experiment_graph.get_task_dependencies(task_id)
+ return all(dep in completed_tasks for dep in dependencies)
+
+ def _check_device_available(self, task_device: TaskDeviceConfig) -> bool:
+ """
+ Check if a device is available for a task. A device is available if it is active and not allocated by the
+ device allocation manager.
+ """
+ if self._device_manager.get_device(task_device.lab_id, task_device.id).status == DeviceStatus.INACTIVE:
+ log.warning(
+ f"Device {task_device.id} in lab {task_device.lab_id} is inactive but is requested by task "
+ f"{task_device.id}."
+ )
+ return False
+
+ return not self._device_allocation_manager.is_allocated(task_device.lab_id, task_device.id)
+
+ def _check_container_available(self, container_id: str) -> bool:
+ """
+ Check if a container is available for a task. A container is available if it is not allocated by the
+ container allocation manager.
+ """
+ return not self._container_allocation_manager.is_allocated(container_id)
+
+ def is_experiment_completed(self, experiment_id: str) -> bool:
+ """
+ Check if an experiment has been completed. An experiment is complete when every task in its task graph is
+ among the completed tasks recorded by the experiment manager.
+ """
+ if experiment_id not in self._registered_experiments:
+ raise EosSchedulerRegistrationError(
+ f"Cannot check if experiment {experiment_id} is completed as it is not registered."
+ )
+
+ experiment_type, experiment_graph = self._registered_experiments[experiment_id]
+ all_tasks = experiment_graph.get_task_graph().nodes
+ completed_tasks = self._experiment_manager.get_completed_tasks(experiment_id)
+
+ return all(task in completed_tasks for task in all_tasks)
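+
+
+# Usage sketch (illustrative; assumes the managers above are already constructed
+# and that `graph` is the ExperimentGraph of a loaded experiment type):
+#
+#   scheduler = BasicScheduler(config_mgr, experiment_mgr, task_mgr, device_mgr, alloc_mgr)
+#   scheduler.register_experiment("exp-1", "my_experiment", graph)
+#   while not scheduler.is_experiment_completed("exp-1"):
+#       for scheduled_task in await scheduler.request_tasks("exp-1"):
+#           ...  # dispatch the task for execution
+#   scheduler.unregister_experiment("exp-1")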
diff --git a/eos/scheduling/entities/__init__.py b/eos/scheduling/entities/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/eos/scheduling/entities/scheduled_task.py b/eos/scheduling/entities/scheduled_task.py
new file mode 100644
index 0000000..06adcf4
--- /dev/null
+++ b/eos/scheduling/entities/scheduled_task.py
@@ -0,0 +1,11 @@
+from pydantic import BaseModel
+
+from eos.configuration.entities.task import TaskDeviceConfig
+from eos.resource_allocation.entities.resource_request import ActiveResourceAllocationRequest
+
+
+class ScheduledTask(BaseModel):
+ id: str
+ experiment_id: str
+ devices: list[TaskDeviceConfig]
+ allocated_resources: ActiveResourceAllocationRequest
diff --git a/eos/scheduling/exceptions.py b/eos/scheduling/exceptions.py
new file mode 100644
index 0000000..6f9fef1
--- /dev/null
+++ b/eos/scheduling/exceptions.py
@@ -0,0 +1,10 @@
+class EosSchedulerError(Exception):
+ pass
+
+
+class EosSchedulerRegistrationError(EosSchedulerError):
+ pass
+
+
+class EosSchedulerResourceAllocationError(EosSchedulerError):
+ pass
diff --git a/eos/tasks/__init__.py b/eos/tasks/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/eos/tasks/base_task.py b/eos/tasks/base_task.py
new file mode 100644
index 0000000..bf1d970
--- /dev/null
+++ b/eos/tasks/base_task.py
@@ -0,0 +1,47 @@
+from abc import ABC, abstractmethod
+from typing import Any
+
+from eos.containers.entities.container import Container
+from eos.devices.device_actor_references import DeviceRayActorWrapperReferences
+from eos.tasks.exceptions import EosTaskExecutionError
+
+
+class BaseTask(ABC):
+ """Base class for all tasks in EOS."""
+
+ DevicesType = dict[str, DeviceRayActorWrapperReferences]
+ ParametersType = dict[str, Any]
+ ContainersType = dict[str, Container]
+ FilesType = dict[str, bytes]
+ OutputType = tuple[ParametersType, ContainersType, FilesType]
+
+ def __init__(self, experiment_id: str, task_id: str) -> None:
+ self._experiment_id = experiment_id
+ self._task_id = task_id
+
+ def execute(
+ self, devices: DevicesType, parameters: ParametersType, containers: ContainersType
+ ) -> OutputType | None:
+ """Execute a task with the given input and return the output."""
+ try:
+ output = self._execute(devices, parameters, containers)
+
+ output_parameters, output_containers, output_files = ({}, {}, {})
+
+ if output:
+ output_parameters = output[0] if len(output) > 0 and output[0] is not None else {}
+ output_containers = output[1] if len(output) > 1 and output[1] is not None else {}
+ output_files = output[2] if len(output) > 2 and output[2] is not None else {}
+
+ if containers:
+ output_containers = {**containers, **output_containers}
+
+ return output_parameters, output_containers, output_files
+ except Exception as e:
+ raise EosTaskExecutionError(f"Error executing task {self._task_id}") from e
+
+ @abstractmethod
+ def _execute(
+ self, devices: DevicesType, parameters: ParametersType, containers: ContainersType
+ ) -> OutputType | None:
+ """Implementation for the execution of a task."""
diff --git a/eos/tasks/entities/__init__.py b/eos/tasks/entities/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/eos/tasks/entities/task.py b/eos/tasks/entities/task.py
new file mode 100644
index 0000000..095f489
--- /dev/null
+++ b/eos/tasks/entities/task.py
@@ -0,0 +1,86 @@
+from datetime import datetime, timezone
+from enum import Enum
+from typing import Any, ClassVar
+
+from omegaconf import ListConfig, DictConfig, OmegaConf
+from pydantic import BaseModel, Field, field_serializer
+
+from eos.configuration.entities.task import TaskDeviceConfig
+from eos.containers.entities.container import Container
+
+
+class TaskStatus(Enum):
+ CREATED = "CREATED"
+ RUNNING = "RUNNING"
+ COMPLETED = "COMPLETED"
+ FAILED = "FAILED"
+ CANCELLED = "CANCELLED"
+
+
+class TaskContainer(BaseModel):
+ id: str
+
+
+class TaskInput(BaseModel):
+ parameters: dict[str, Any] | None = None
+ containers: dict[str, Container] | None = None
+
+ class Config:
+ arbitrary_types_allowed = True
+
+ @field_serializer("parameters")
+ def serialize_parameters(self, parameters: dict[str, Any] | None, _info) -> Any:
+ if parameters is None:
+ return None
+ return omegaconf_serializer(parameters)
+
+
+class TaskOutput(BaseModel):
+ parameters: dict[str, Any] | None = None
+ containers: dict[str, Container] | None = None
+ file_names: list[str] | None = None
+
+ @field_serializer("parameters")
+ def serialize_parameters(self, parameters: dict[str, Any] | None, _info) -> Any:
+ if parameters is None:
+ return None
+ return omegaconf_serializer(parameters)
+
+
+def omegaconf_serializer(obj: Any) -> Any:
+ if isinstance(obj, ListConfig | DictConfig):
+ return OmegaConf.to_object(obj)
+ if isinstance(obj, dict):
+ return {k: omegaconf_serializer(v) for k, v in obj.items()}
+ if isinstance(obj, list):
+ return [omegaconf_serializer(v) for v in obj]
+ return obj
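+
+# Example (illustrative): OmegaConf containers are recursively converted to plain
+# Python objects.
+#
+#   >>> omegaconf_serializer(OmegaConf.create({"a": [1, 2]}))
+#   {'a': [1, 2]}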
+
+
+class Task(BaseModel):
+ id: str
+ type: str
+ experiment_id: str
+
+ devices: list[TaskDeviceConfig] = []
+ input: TaskInput = TaskInput()
+ output: TaskOutput = TaskOutput()
+
+ status: TaskStatus = TaskStatus.CREATED
+
+ metadata: dict[str, Any] = {}
+ start_time: datetime | None = None
+ end_time: datetime | None = None
+
+ # default_factory gives each task a fresh timestamp instead of one shared at import time
+ created_at: datetime = Field(default_factory=lambda: datetime.now(tz=timezone.utc))
+
+ class Config:
+ arbitrary_types_allowed = True
+ json_encoders: ClassVar = {
+ ListConfig: lambda v: omegaconf_serializer(v),
+ DictConfig: lambda v: omegaconf_serializer(v),
+ }
+
+ @field_serializer("status")
+ def status_enum_to_string(self, v: TaskStatus) -> str:
+ return v.value
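+
+
+# Example (illustrative):
+#
+#   task = Task(id="mix", type="MagneticMixing", experiment_id="exp-1")
+#   assert task.status == TaskStatus.CREATED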
diff --git a/eos/tasks/entities/task_execution_parameters.py b/eos/tasks/entities/task_execution_parameters.py
new file mode 100644
index 0000000..7d148c6
--- /dev/null
+++ b/eos/tasks/entities/task_execution_parameters.py
@@ -0,0 +1,10 @@
+from pydantic import BaseModel, Field
+
+from eos.configuration.entities.task import TaskConfig
+
+
+class TaskExecutionParameters(BaseModel):
+ experiment_id: str
+ task_config: TaskConfig
+ resource_allocation_priority: int = Field(120, ge=0)
+ resource_allocation_timeout: int = Field(30, ge=0)
diff --git a/eos/tasks/exceptions.py b/eos/tasks/exceptions.py
new file mode 100644
index 0000000..bdee178
--- /dev/null
+++ b/eos/tasks/exceptions.py
@@ -0,0 +1,26 @@
+class EosTaskError(Exception):
+ pass
+
+
+class EosTaskValidationError(EosTaskError):
+ pass
+
+
+class EosTaskInputResolutionError(EosTaskError):
+ pass
+
+
+class EosTaskStateError(EosTaskError):
+ pass
+
+
+class EosTaskExistsError(EosTaskError):
+ pass
+
+
+class EosTaskExecutionError(EosTaskError):
+ pass
+
+
+class EosTaskResourceAllocationError(EosTaskError):
+ pass
diff --git a/eos/tasks/on_demand_task_executor.py b/eos/tasks/on_demand_task_executor.py
new file mode 100644
index 0000000..23cb45b
--- /dev/null
+++ b/eos/tasks/on_demand_task_executor.py
@@ -0,0 +1,102 @@
+import asyncio
+import traceback
+from typing import Any
+
+from eos.configuration.entities.task import TaskConfig
+from eos.containers.container_manager import ContainerManager
+from eos.containers.entities.container import Container
+from eos.logging.logger import log
+from eos.tasks.entities.task import TaskOutput
+from eos.tasks.entities.task_execution_parameters import TaskExecutionParameters
+from eos.tasks.exceptions import EosTaskExecutionError, EosTaskValidationError, EosTaskStateError
+from eos.tasks.task_executor import TaskExecutor
+from eos.tasks.task_manager import TaskManager
+
+
+class OnDemandTaskExecutor:
+ """
+ Executor for on-demand tasks (not part of an experiment or campaign).
+ """
+
+ EXPERIMENT_ID = "on_demand"
+
+ def __init__(self, task_executor: TaskExecutor, task_manager: TaskManager, container_manager: ContainerManager):
+ self._task_executor = task_executor
+ self._task_manager = task_manager
+ self._container_manager = container_manager
+
+ self._task_futures: dict[str, asyncio.Task] = {}
+
+ log.debug("On-demand task executor initialized.")
+
+ async def submit_task(
+ self,
+ task_config: TaskConfig,
+ resource_allocation_priority: int = 90,
+ resource_allocation_timeout: int = 3600,
+ ) -> None:
+ task_id = task_config.id
+ task_execution_parameters = TaskExecutionParameters(
+ experiment_id=self.EXPERIMENT_ID,
+ task_config=task_config,
+ resource_allocation_priority=resource_allocation_priority,
+ resource_allocation_timeout=resource_allocation_timeout,
+ )
+
+ self._task_futures[task_id] = asyncio.create_task(
+ self._task_executor.request_task_execution(task_execution_parameters)
+ )
+ log.info(f"Submitted on-demand task '{task_id}'.")
+
+ async def cancel_task(self, task_id: str) -> None:
+ if task_id not in self._task_futures:
+ raise EosTaskExecutionError(f"Cannot cancel non-existent on-demand task '{task_id}'.")
+
+ future = self._task_futures[task_id]
+ future.cancel()
+ await self._task_executor.request_task_cancellation(self.EXPERIMENT_ID, task_id)
+ del self._task_futures[task_id]
+ log.info(f"Cancelled on-demand task '{task_id}'.")
+
+ async def process_tasks(self) -> None:
+ completed_tasks = []
+
+ for task_id, future in self._task_futures.items():
+ if future.done():
+ try:
+ output = await future
+ self._process_task_output(task_id, *output)
+ except asyncio.CancelledError:
+ log.info(f"On-demand task '{task_id}' was cancelled.")
+ except (EosTaskExecutionError, EosTaskValidationError, EosTaskStateError):
+ log.error(f"Failed on-demand task '{task_id}': {traceback.format_exc()}")
+ finally:
+ completed_tasks.append(task_id)
+
+ for task_id in completed_tasks:
+ del self._task_futures[task_id]
+
+ def _process_task_output(
+ self,
+ task_id: str,
+ output_parameters: dict[str, Any],
+ output_containers: dict[str, Container],
+ output_files: dict[str, bytes],
+ ) -> None:
+ for container in output_containers.values():
+ self._container_manager.update_container(container)
+
+ task_output = TaskOutput(
+ parameters=output_parameters,
+ containers=output_containers,
+ file_names=list(output_files.keys()),
+ )
+
+ for file_name, file_data in output_files.items():
+ self._task_manager.add_task_output_file(self.EXPERIMENT_ID, task_id, file_name, file_data)
+
+ self._task_manager.add_task_output(self.EXPERIMENT_ID, task_id, task_output)
+ self._task_manager.complete_task(self.EXPERIMENT_ID, task_id)
+ log.info(f"EXP '{self.EXPERIMENT_ID}' - Completed task '{task_id}'.")
diff --git a/eos/tasks/repositories/__init__.py b/eos/tasks/repositories/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/eos/tasks/repositories/task_repository.py b/eos/tasks/repositories/task_repository.py
new file mode 100644
index 0000000..dce767b
--- /dev/null
+++ b/eos/tasks/repositories/task_repository.py
@@ -0,0 +1,5 @@
+from eos.persistence.mongo_repository import MongoRepository
+
+
+class TaskRepository(MongoRepository):
+ pass
diff --git a/eos/tasks/task_executor.py b/eos/tasks/task_executor.py
new file mode 100644
index 0000000..f025803
--- /dev/null
+++ b/eos/tasks/task_executor.py
@@ -0,0 +1,278 @@
+import asyncio
+from dataclasses import dataclass
+from typing import Any
+
+import ray
+from omegaconf import OmegaConf
+from ray import ObjectRef
+
+from eos.configuration.configuration_manager import ConfigurationManager
+from eos.configuration.plugin_registries.task_plugin_registry import TaskPluginRegistry
+from eos.containers.container_manager import ContainerManager
+from eos.containers.entities.container import Container
+from eos.devices.device_actor_references import DeviceRayActorReference, DeviceRayActorWrapperReferences
+from eos.devices.device_manager import DeviceManager
+from eos.logging.logger import log
+from eos.resource_allocation.entities.resource_request import (
+ ActiveResourceAllocationRequest,
+ ResourceAllocationRequest,
+ ResourceType,
+ ResourceRequestAllocationStatus,
+)
+from eos.resource_allocation.exceptions import EosResourceRequestError
+from eos.resource_allocation.resource_allocation_manager import ResourceAllocationManager
+from eos.scheduling.entities.scheduled_task import ScheduledTask
+from eos.tasks.base_task import BaseTask
+from eos.tasks.entities.task import TaskStatus
+from eos.tasks.entities.task_execution_parameters import TaskExecutionParameters
+from eos.tasks.exceptions import (
+ EosTaskResourceAllocationError,
+ EosTaskExecutionError,
+ EosTaskValidationError,
+ EosTaskExistsError,
+)
+from eos.tasks.task_input_parameter_caster import TaskInputParameterCaster
+from eos.tasks.task_manager import TaskManager
+from eos.tasks.task_validator import TaskValidator
+
+
+@dataclass
+class TaskExecutionContext:
+ experiment_id: str
+ task_id: str
+ task_ref: ObjectRef | None = None
+ active_resource_request: ActiveResourceAllocationRequest | None = None
+
+
+class TaskExecutor:
+ def __init__(
+ self,
+ task_manager: TaskManager,
+ device_manager: DeviceManager,
+ container_manager: ContainerManager,
+ resource_allocation_manager: ResourceAllocationManager,
+ configuration_manager: ConfigurationManager,
+ ):
+ self._task_manager = task_manager
+ self._device_manager = device_manager
+ self._container_manager = container_manager
+ self._resource_allocation_manager = resource_allocation_manager
+ self._configuration_manager = configuration_manager
+ self._task_plugin_registry = TaskPluginRegistry()
+ self._task_validator = TaskValidator()
+ self._task_input_parameter_caster = TaskInputParameterCaster()
+
+ self._active_tasks: dict[str, TaskExecutionContext] = {}
+
+ log.debug("Task executor initialized.")
+
+ async def request_task_execution(
+ self, task_parameters: TaskExecutionParameters, scheduled_task: ScheduledTask | None = None
+ ) -> BaseTask.OutputType | None:
+ context = TaskExecutionContext(task_parameters.experiment_id, task_parameters.task_config.id)
+ self._active_tasks[context.task_id] = context
+
+ try:
+ containers = self._prepare_containers(task_parameters)
+ await self._initialize_task(task_parameters, containers)
+
+ self._task_validator.validate(task_parameters.task_config)
+
+ context.active_resource_request = (
+ scheduled_task.allocated_resources
+ if scheduled_task
+ else await self._allocate_resources(task_parameters)
+ )
+
+ context.task_ref = self._execute_task(task_parameters, containers)
+ return await context.task_ref
+ except EosTaskExistsError as e:
+ raise EosTaskExecutionError(
+ f"Error executing task '{context.task_id}' in experiment '{context.experiment_id}'"
+ ) from e
+ except EosTaskValidationError as e:
+ self._task_manager.fail_task(context.experiment_id, context.task_id)
+ log.warning(f"EXP '{context.experiment_id}' - Failed task '{context.task_id}'.")
+ raise EosTaskValidationError(
+ f"Validation error for task '{context.task_id}' in experiment '{context.experiment_id}'"
+ ) from e
+ except EosTaskResourceAllocationError as e:
+ self._task_manager.fail_task(context.experiment_id, context.task_id)
+ log.warning(f"EXP '{context.experiment_id}' - Failed task '{context.task_id}'.")
+ raise EosTaskResourceAllocationError(
+ f"Failed to allocate resources for task '{context.task_id}' in experiment '{context.experiment_id}'"
+ ) from e
+ except Exception as e:
+ self._task_manager.fail_task(context.experiment_id, context.task_id)
+ log.warning(f"EXP '{context.experiment_id}' - Failed task '{context.task_id}'.")
+ raise EosTaskExecutionError(
+ f"Error executing task '{context.task_id}' in experiment '{context.experiment_id}'"
+ ) from e
+ finally:
+ if context.active_resource_request and not scheduled_task:
+ self._release_resources(context.active_resource_request)
+
+ if context.task_id in self._active_tasks:
+ del self._active_tasks[context.task_id]
+
+ async def request_task_cancellation(self, experiment_id: str, task_id: str) -> None:
+ context = self._active_tasks.get(task_id)
+ if not context:
+ return
+
+ if context.task_ref:
+ ray.cancel(context.task_ref, recursive=True)
+
+ if context.active_resource_request:
+ self._resource_allocation_manager.abort_active_request(context.active_resource_request.id)
+ self._resource_allocation_manager.process_active_requests()
+
+ self._task_manager.cancel_task(experiment_id, task_id)
+ log.warning(f"EXP '{experiment_id}' - Cancelled task '{task_id}'.")
+ del self._active_tasks[task_id]
+
+ def _prepare_containers(self, execution_parameters: TaskExecutionParameters) -> dict[str, Container]:
+ return {
+ container_name: self._container_manager.get_container(container_id)
+ for container_name, container_id in execution_parameters.task_config.containers.items()
+ }
+
+ async def _initialize_task(
+ self, execution_parameters: TaskExecutionParameters, containers: dict[str, Container]
+ ) -> None:
+ experiment_id, task_id = execution_parameters.experiment_id, execution_parameters.task_config.id
+ log.debug(f"Execution of task '{task_id}' for experiment '{experiment_id}' has been requested")
+
+ task = self._task_manager.get_task(experiment_id, task_id)
+ if task and task.status == TaskStatus.RUNNING:
+ log.warning(f"Found running task '{task_id}' for experiment '{experiment_id}'. Restarting it.")
+ await self.request_task_cancellation(experiment_id, task_id)
+ self._task_manager.delete_task(experiment_id, task_id)
+
+ self._task_manager.create_task(
+ experiment_id=experiment_id,
+ task_id=task_id,
+ task_type=execution_parameters.task_config.type,
+ devices=execution_parameters.task_config.devices,
+ parameters=execution_parameters.task_config.parameters,
+ containers=containers,
+ )
+
+ async def _allocate_resources(
+ self, execution_parameters: TaskExecutionParameters
+ ) -> ActiveResourceAllocationRequest:
+ resource_request = self._create_resource_request(execution_parameters)
+ return await self._request_resources(resource_request, execution_parameters.resource_allocation_timeout)
+
+ def _get_device_actor_references(self, task_parameters: TaskExecutionParameters) -> list[DeviceRayActorReference]:
+ return [
+ DeviceRayActorReference(
+ id=device.id,
+ lab_id=device.lab_id,
+ type=self._configuration_manager.labs[device.lab_id].devices[device.id].type,
+ actor_handle=self._device_manager.get_device_actor(device.lab_id, device.id),
+ )
+ for device in task_parameters.task_config.devices
+ ]
+
+ def _execute_task(
+ self,
+ task_execution_parameters: TaskExecutionParameters,
+ containers: dict[str, Container],
+ ) -> ObjectRef:
+ experiment_id, task_id = task_execution_parameters.experiment_id, task_execution_parameters.task_config.id
+ device_actor_references = self._get_device_actor_references(task_execution_parameters)
+ task_class_type = self._task_plugin_registry.get_task_class_type(task_execution_parameters.task_config.type)
+ parameters = task_execution_parameters.task_config.parameters
+ if not isinstance(parameters, dict):
+ parameters = OmegaConf.to_object(parameters)
+
+ parameters = self._task_input_parameter_caster.cast_input_parameters(
+ task_id, task_execution_parameters.task_config.type, parameters
+ )
+
+ @ray.remote(num_cpus=0)
+ def _ray_execute_task(
+ _experiment_id: str,
+ _task_id: str,
+ _devices_actor_references: list[DeviceRayActorReference],
+ _parameters: dict[str, Any],
+ _containers: dict[str, Container],
+ ) -> tuple:
+ task = task_class_type(_experiment_id, _task_id)
+ devices = DeviceRayActorWrapperReferences(_devices_actor_references)
+ return task.execute(devices, _parameters, _containers)
+
+ self._task_manager.start_task(experiment_id, task_id)
+ log.info(f"EXP '{experiment_id}' - Started task '{task_id}'.")
+
+ return _ray_execute_task.options(name=f"{experiment_id}.{task_id}").remote(
+ experiment_id,
+ task_id,
+ device_actor_references,
+ parameters,
+ containers,
+ )
+
+ @staticmethod
+ def _create_resource_request(
+ task_parameters: TaskExecutionParameters,
+ ) -> ResourceAllocationRequest:
+ task_id, experiment_id = task_parameters.task_config.id, task_parameters.experiment_id
+ resource_allocation_priority = task_parameters.resource_allocation_priority
+
+ request = ResourceAllocationRequest(
+ requester=task_id,
+ experiment_id=experiment_id,
+ reason=f"Resources required for task '{task_id}'",
+ priority=resource_allocation_priority,
+ )
+
+ for device in task_parameters.task_config.devices:
+ request.add_resource(device.id, device.lab_id, ResourceType.DEVICE)
+
+ for container_id in task_parameters.task_config.containers.values():
+ request.add_resource(container_id, "", ResourceType.CONTAINER)
+
+ return request
+
+ async def _request_resources(
+ self, resource_request: ResourceAllocationRequest, timeout: int = 30
+ ) -> ActiveResourceAllocationRequest:
+ allocation_event = asyncio.Event()
+ active_request = None
+
+ def resource_request_callback(request: ActiveResourceAllocationRequest) -> None:
+ nonlocal active_request
+ active_request = request
+ allocation_event.set()
+
+ active_resource_request = self._resource_allocation_manager.request_resources(
+ resource_request, resource_request_callback
+ )
+
+ if active_resource_request.status == ResourceRequestAllocationStatus.ALLOCATED:
+ return active_resource_request
+
+ self._resource_allocation_manager.process_active_requests()
+
+ try:
+ await asyncio.wait_for(allocation_event.wait(), timeout)
+ except asyncio.TimeoutError as e:
+ self._resource_allocation_manager.abort_active_request(active_resource_request.id)
+ raise EosTaskResourceAllocationError(
+ f"Resource allocation timed out after {timeout} seconds for task '{resource_request.requester}'. "
+ f"Aborting all resource allocations for this task."
+ ) from e
+
+ if not active_request:
+ raise EosTaskResourceAllocationError(f"Error allocating resources for task '{resource_request.requester}'")
+
+ return active_request
+
+ def _release_resources(self, active_request: ActiveResourceAllocationRequest) -> None:
+ try:
+ self._resource_allocation_manager.release_resources(active_request)
+ self._resource_allocation_manager.process_active_requests()
+ except EosResourceRequestError as e:
+ raise EosTaskExecutionError(f"Error releasing task '{active_request.request.requester}' resources") from e
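+
+
+# Usage sketch (illustrative; assumes the managers above are constructed and
+# `task_config` is a valid TaskConfig):
+#
+#   executor = TaskExecutor(task_mgr, device_mgr, container_mgr, alloc_mgr, config_mgr)
+#   params = TaskExecutionParameters(experiment_id="exp-1", task_config=task_config)
+#   parameters, containers, files = await executor.request_task_execution(params)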
diff --git a/eos/tasks/task_input_parameter_caster.py b/eos/tasks/task_input_parameter_caster.py
new file mode 100644
index 0000000..008e092
--- /dev/null
+++ b/eos/tasks/task_input_parameter_caster.py
@@ -0,0 +1,33 @@
+from typing import Any
+
+from eos.configuration.entities.parameters import ParameterType
+from eos.configuration.exceptions import EosTaskValidationError
+from eos.configuration.spec_registries.task_specification_registry import TaskSpecificationRegistry
+
+
+class TaskInputParameterCaster:
+ def __init__(self):
+ self.task_spec_registry = TaskSpecificationRegistry()
+
+ def cast_input_parameters(self, task_id: str, task_type: str, input_parameters: dict[str, Any]) -> dict[str, Any]:
+ """
+ Cast input parameters of a task to the expected Python types.
+
+ :param task_id: The ID of the task.
+ :param task_type: The type of the task.
+ :param input_parameters: The input parameters of the task.
+ :return: The input parameters cast to the expected Python types.
+ """
+ task_spec = self.task_spec_registry.get_spec_by_type(task_type)
+
+ for parameter_name, parameter in input_parameters.items():
+ try:
+ parameter_type = ParameterType(task_spec.input_parameters[parameter_name].type)
+ input_parameters[parameter_name] = parameter_type.python_type()(parameter)
+ except (TypeError, ValueError) as e:
+ raise EosTaskValidationError(
+ f"Failed to cast input parameter '{parameter_name}' of task '{task_id}' of type "
+ f"'{type(parameter)}' to the expected type '{task_spec.input_parameters[parameter_name].type}'."
+ ) from e
+
+ return input_parameters
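+
+
+# Example (illustrative): given a task spec that declares `temperature` as a
+# decimal parameter, a string value read from YAML would be cast to float:
+#
+#   caster = TaskInputParameterCaster()
+#   caster.cast_input_parameters("mix", "MagneticMixing", {"temperature": "25.0"})
+#   # -> {"temperature": 25.0}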
diff --git a/eos/tasks/task_input_parameter_validator.py b/eos/tasks/task_input_parameter_validator.py
new file mode 100644
index 0000000..cb62f98
--- /dev/null
+++ b/eos/tasks/task_input_parameter_validator.py
@@ -0,0 +1,156 @@
+import copy
+from typing import Any
+
+from omegaconf import ListConfig, OmegaConf, DictConfig
+
+from eos.configuration.entities.parameters import ParameterType, ParameterFactory
+from eos.configuration.entities.task import TaskConfig
+from eos.configuration.entities.task_specification import TaskSpecification
+from eos.configuration.exceptions import EosConfigurationError
+from eos.configuration.validation import validation_utils
+from eos.logging.batch_error_logger import batch_error, raise_batched_errors
+from eos.tasks.exceptions import EosTaskValidationError
+
+
+class TaskInputParameterValidator:
+ """
+ Validates that the input parameters of a task conform to the task's specification.
+ """
+
+ def __init__(self, task: TaskConfig, task_spec: TaskSpecification):
+ self._task_id = task.id
+ self._input_parameters = task.parameters
+ self._task_spec = task_spec
+
+ def validate_input_parameters(self) -> None:
+ """
+ Validate the input parameters of a task.
+ Ensure that all required parameters are provided and that the provided parameters conform to the task's
+ specification.
+ """
+ for parameter_name in self._input_parameters:
+ self._validate_parameter_in_task_spec(parameter_name)
+ raise_batched_errors(root_exception_type=EosTaskValidationError)
+
+ self._validate_all_required_parameters_provided()
+
+ for parameter_name, parameter in self._input_parameters.items():
+ self._validate_parameter(parameter_name, parameter)
+ raise_batched_errors(root_exception_type=EosTaskValidationError)
+
+ def _validate_parameter_in_task_spec(self, parameter_name: str) -> None:
+ """
+ Check that the parameter exists in the task specification.
+ """
+ if parameter_name not in self._task_spec.input_parameters:
+ batch_error(
+ f"Parameter '{parameter_name}' in task '{self._task_id}' is invalid. "
+ f"Expected a parameter found in the task specification.",
+ EosTaskValidationError,
+ )
+
+ def _validate_parameter(self, parameter_name: str, parameter: Any) -> None:
+ """
+ Validate a parameter according to the task specification. Expect that the parameter is concrete.
+ """
+ if validation_utils.is_dynamic_parameter(parameter):
+ batch_error(
+ f"Input parameter '{parameter_name}' in task '{self._task_id}' is 'eos_dynamic', which is not "
+ f"allowed.",
+ EosTaskValidationError,
+ )
+ else:
+ self._validate_parameter_spec(parameter_name, parameter)
+
+ def _validate_parameter_spec(self, parameter_name: str, parameter: Any) -> None:
+ """
+ Validate a parameter to make sure it conforms to its task specification.
+ """
+ parameter_spec = copy.deepcopy(self._task_spec.input_parameters[parameter_name])
+
+ try:
+ parameter = self._convert_value_type(parameter, ParameterType(parameter_spec.type))
+ except Exception:
+ batch_error(
+ f"Parameter '{parameter_name}' in task '{self._task_id}' has incorrect type {type(parameter)}. "
+ f"Expected type: '{parameter_spec.type}'.",
+ EosTaskValidationError,
+ )
+ return
+
+ parameter_spec["value"] = parameter
+
+ try:
+ parameter_type = ParameterType(parameter_spec.type)
+ ParameterFactory.create_parameter(parameter_type, **parameter_spec)
+ except EosConfigurationError as e:
+ batch_error(
+ f"Parameter '{parameter_name}' in task '{self._task_id}' validation error: {e}",
+ EosTaskValidationError,
+ )
+
+ def _convert_value_type(self, value: Any, expected_type: ParameterType) -> Any:
+ if isinstance(value, expected_type.python_type()):
+ return value
+
+ if isinstance(value, ListConfig | DictConfig):
+ value = OmegaConf.to_object(value)
+
+ conversion_map = {
+ ParameterType.integer: int,
+ ParameterType.decimal: float,
+ ParameterType.string: str,
+ ParameterType.choice: str,
+ }
+
+ if expected_type in conversion_map:
+ return conversion_map[expected_type](value)
+
+ if expected_type == ParameterType.boolean:
+ if isinstance(value, bool):
+ return value
+ if isinstance(value, str):
+ v = value.strip().lower()
+ if v == "true":
+ return True
+ if v == "false":
+ return False
+ raise ValueError(f"Cannot convert {value} to boolean")
+
+ if expected_type == ParameterType.list:
+ if isinstance(value, list | tuple):
+ return list(value)
+ raise ValueError(f"Cannot convert {value} to list")
+
+ if expected_type == ParameterType.dictionary:
+ if isinstance(value, dict):
+ return value
+ raise ValueError(f"Cannot convert {value} to dictionary")
+
+ raise ValueError(f"Unsupported parameter type: {expected_type}")
+
+ def _validate_all_required_parameters_provided(self) -> None:
+ """
+ Validate that all required parameters are provided in the parameter dictionary.
+ """
+ missing_parameters = self._get_missing_required_task_parameters()
+
+ if missing_parameters:
+ raise EosTaskValidationError(
+ f"Task '{self._task_id}' is missing required input parameters: {missing_parameters}"
+ )
+
+ def _get_missing_required_task_parameters(self) -> list[str]:
+ """
+ Get all the missing required parameters in the parameter dictionary.
+ """
+ required_parameters = self._get_required_input_parameters()
+ return [
+ parameter_name for parameter_name in required_parameters if parameter_name not in self._input_parameters
+ ]
+
+ def _get_required_input_parameters(self) -> list[str]:
+ """
+ Get all the required input parameters for the task.
+ """
+ return [param for param, spec in self._task_spec.input_parameters.items() if "value" not in spec]
diff --git a/eos/tasks/task_input_resolver.py b/eos/tasks/task_input_resolver.py
new file mode 100644
index 0000000..c24508a
--- /dev/null
+++ b/eos/tasks/task_input_resolver.py
@@ -0,0 +1,130 @@
+import copy
+import functools
+from typing import Protocol
+
+from eos.configuration.entities.task import TaskConfig
+from eos.configuration.validation import validation_utils
+from eos.experiments.experiment_manager import ExperimentManager
+from eos.tasks.exceptions import EosTaskInputResolutionError
+from eos.tasks.task_manager import TaskManager
+
+
+class Resolver(Protocol):
+ def __call__(self, experiment_id: str, task_config: TaskConfig) -> TaskConfig: ...
+
+
+class TaskInputResolver:
+ """
+ Resolves dynamic parameters, input parameter references, and input container references for a task that is
+ part of an experiment.
+ """
+
+ def __init__(self, task_manager: TaskManager, experiment_manager: ExperimentManager):
+ self._task_manager = task_manager
+ self._experiment_manager = experiment_manager
+
+ def resolve_task_inputs(self, experiment_id: str, task_config: TaskConfig) -> TaskConfig:
+ """
+ Resolve all input references for a task.
+ """
+ return self._apply_resolvers(
+ experiment_id,
+ task_config,
+ [
+ self._resolve_dynamic_parameters,
+ self._resolve_input_parameter_references,
+ self._resolve_input_container_references,
+ ],
+ )
+
+ def resolve_dynamic_parameters(self, experiment_id: str, task_config: TaskConfig) -> TaskConfig:
+ """
+ Resolve dynamic parameters for a task.
+ """
+ return self._apply_resolvers(experiment_id, task_config, [self._resolve_dynamic_parameters])
+
+ def resolve_input_parameter_references(self, experiment_id: str, task_config: TaskConfig) -> TaskConfig:
+ """
+ Resolve input parameter references for a task.
+ """
+ return self._apply_resolvers(experiment_id, task_config, [self._resolve_input_parameter_references])
+
+ def resolve_input_container_references(self, experiment_id: str, task_config: TaskConfig) -> TaskConfig:
+ """
+ Resolve input container references for a task.
+ """
+ return self._apply_resolvers(experiment_id, task_config, [self._resolve_input_container_references])
+
+ def _apply_resolvers(self, experiment_id: str, task_config: TaskConfig, resolvers: list[Resolver]) -> TaskConfig:
+ """
+ Apply a list of resolver functions to the task config.
+ """
+ return functools.reduce(
+ lambda config, resolver: resolver(experiment_id, config), resolvers, copy.deepcopy(task_config)
+ )
+
+ def _resolve_dynamic_parameters(self, experiment_id: str, task_config: TaskConfig) -> TaskConfig:
+ experiment = self._experiment_manager.get_experiment(experiment_id)
+ task_dynamic_parameters = experiment.dynamic_parameters.get(task_config.id, {})
+
+ task_config.parameters.update(task_dynamic_parameters)
+
+ unresolved_parameters = [
+ param for param, value in task_config.parameters.items() if validation_utils.is_dynamic_parameter(value)
+ ]
+
+ if unresolved_parameters:
+ raise EosTaskInputResolutionError(
+ f"Unresolved input dynamic parameters in task '{task_config.id}': {unresolved_parameters}"
+ )
+
+ return task_config
+
+ def _resolve_input_parameter_references(self, experiment_id: str, task_config: TaskConfig) -> TaskConfig:
+ for param_name, param_value in task_config.parameters.items():
+ if not validation_utils.is_parameter_reference(param_value):
+ continue
+
+ ref_task_id, ref_param_name = param_value.split(".")
+ resolved_value = self._resolve_reference(experiment_id, ref_task_id, ref_param_name, "parameter")
+
+ if resolved_value is not None:
+ task_config.parameters[param_name] = resolved_value
+ else:
+ raise EosTaskInputResolutionError(
+ f"Unresolved input parameter reference '{param_value}' in task '{task_config.id}'"
+ )
+
+ return task_config
+
+ def _resolve_input_container_references(self, experiment_id: str, task_config: TaskConfig) -> TaskConfig:
+ for container_name, container_id in task_config.containers.items():
+ if not validation_utils.is_container_reference(container_id):
+ continue
+
+ ref_task_id, ref_container_name = container_id.split(".")
+ resolved_value = self._resolve_reference(experiment_id, ref_task_id, ref_container_name, "container")
+
+ if resolved_value is not None:
+ task_config.containers[container_name] = resolved_value
+ else:
+ raise EosTaskInputResolutionError(
+ f"Unresolved input container reference '{container_id}' in task '{task_config.id}'"
+ )
+
+ return task_config
+
+ def _resolve_reference(self, experiment_id: str, ref_task_id: str, ref_name: str, ref_type: str) -> str | None:
+ ref_task_output = self._task_manager.get_task_output(experiment_id, ref_task_id)
+
+ if ref_type == "parameter":
+ if ref_name in (ref_task_output.parameters or {}):
+ return ref_task_output.parameters[ref_name]
+ ref_task = self._task_manager.get_task(experiment_id, ref_task_id)
+ if ref_name in (ref_task.input.parameters or {}):
+ return ref_task.input.parameters[ref_name]
+ elif ref_type == "container":
+ if ref_name in (ref_task_output.containers or {}):
+ return ref_task_output.containers[ref_name].id
+
+ return None
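+
+
+# Example (illustrative): a task config with `containers: {beaker: "mix_task.product"}`
+# has "mix_task.product" replaced by the ID of the container named "product" in the
+# output of task "mix_task" within the same experiment:
+#
+#   resolver = TaskInputResolver(task_manager, experiment_manager)
+#   resolved_config = resolver.resolve_task_inputs("exp-1", task_config)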
diff --git a/eos/tasks/task_manager.py b/eos/tasks/task_manager.py
new file mode 100644
index 0000000..ad10117
--- /dev/null
+++ b/eos/tasks/task_manager.py
@@ -0,0 +1,215 @@
+from collections.abc import AsyncIterable
+from datetime import datetime, timezone
+from typing import Any
+
+from eos.configuration.configuration_manager import ConfigurationManager
+from eos.configuration.entities.task import TaskDeviceConfig
+from eos.containers.entities.container import Container
+from eos.experiments.repositories.experiment_repository import ExperimentRepository
+from eos.logging.logger import log
+from eos.persistence.db_manager import DbManager
+from eos.persistence.file_db_manager import FileDbManager
+from eos.tasks.entities.task import Task, TaskStatus, TaskInput, TaskOutput
+from eos.tasks.exceptions import EosTaskStateError, EosTaskExistsError
+from eos.tasks.repositories.task_repository import TaskRepository
+
+
+class TaskManager:
+ """
+ Manages the state of all tasks in EOS.
+ """
+
+ def __init__(
+ self,
+ configuration_manager: ConfigurationManager,
+ db_manager: DbManager,
+ file_db_manager: FileDbManager,
+ ):
+ self._configuration_manager = configuration_manager
+ self._db_manager = db_manager
+ self._file_db_manager = file_db_manager
+ self._tasks = TaskRepository("tasks", db_manager)
+ self._tasks.create_indices([("experiment_id", 1), ("id", 1)], unique=True)
+ self._experiments = ExperimentRepository("experiments", db_manager)
+
+ log.debug("Task manager initialized.")
+
+ def create_task(
+ self,
+ experiment_id: str,
+ task_id: str,
+ task_type: str,
+ devices: list[TaskDeviceConfig],
+ parameters: dict[str, Any] | None = None,
+ containers: dict[str, Container] | None = None,
+ metadata: dict[str, Any] | None = None,
+ ) -> None:
+ """
+ Create a new task instance for a specific task type that is associated with an experiment.
+
+ :param experiment_id: The id of the experiment.
+ :param task_id: The id of the task in the experiment task sequence.
+ :param task_type: The type of the task as defined in the configuration.
+ :param devices: The devices required for the task.
+ :param parameters: The input parameters for the task.
+ :param containers: The input containers for the task.
+ :param metadata: Additional metadata to be stored with the task.
+ """
+ if self._tasks.get_one(experiment_id=experiment_id, id=task_id):
+ raise EosTaskExistsError(f"Cannot create task '{task_id}' as a task with that ID already exists.")
+
+ task_spec = self._configuration_manager.task_specs.get_spec_by_type(task_type)
+ if not task_spec:
+ raise EosTaskStateError(f"Task type '{task_type}' does not exist.")
+
+ task_input = TaskInput(parameters=parameters or {}, containers=containers or {})
+
+ task = Task(
+ id=task_id,
+ type=task_type,
+ experiment_id=experiment_id,
+ devices=[TaskDeviceConfig(id=device.id, lab_id=device.lab_id) for device in devices],
+ input=task_input,
+ metadata=metadata or {},
+ )
+ self._tasks.create(task.model_dump())
+
+ def delete_task(self, experiment_id: str, task_id: str) -> None:
+ """
+ Delete an experiment task instance.
+ """
+ self._validate_task_exists(experiment_id, task_id)
+
+ self._tasks.delete(experiment_id=experiment_id, id=task_id)
+
+ self._experiments.delete_running_task(experiment_id, task_id)
+ log.info(f"Deleted task '{task_id}' from experiment '{experiment_id}'.")
+
+ def start_task(self, experiment_id: str, task_id: str) -> None:
+ """
+ Add a task to the running tasks list and update its status to running.
+ """
+ self._validate_task_exists(experiment_id, task_id)
+ self._experiments.add_running_task(experiment_id, task_id)
+ self._set_task_status(experiment_id, task_id, TaskStatus.RUNNING)
+
+ def complete_task(self, experiment_id: str, task_id: str) -> None:
+ """
+ Remove a task from the running tasks list and add it to the completed tasks list.
+ """
+ self._validate_task_exists(experiment_id, task_id)
+ self._experiments.move_task_queue(experiment_id, task_id, "running_tasks", "completed_tasks")
+ self._set_task_status(experiment_id, task_id, TaskStatus.COMPLETED)
+
+ def fail_task(self, experiment_id: str, task_id: str) -> None:
+ """
+ Remove a task from the running tasks list and do not add it to the executed tasks list. Update the task status
+ to failed.
+ """
+ self._validate_task_exists(experiment_id, task_id)
+ self._experiments.delete_running_task(experiment_id, task_id)
+ self._set_task_status(experiment_id, task_id, TaskStatus.FAILED)
+
+ def cancel_task(self, experiment_id: str, task_id: str) -> None:
+ """
+ Remove a task from the running tasks list and do not add it to the executed tasks list. Update the task status
+ to cancelled.
+ """
+ self._validate_task_exists(experiment_id, task_id)
+ self._experiments.delete_running_task(experiment_id, task_id)
+ self._set_task_status(experiment_id, task_id, TaskStatus.CANCELLED)
+ log.warning(f"EXP '{experiment_id}' - Cancelled task '{task_id}'.")
+
+ def get_task(self, experiment_id: str, task_id: str) -> Task | None:
+ """
+ Get a task by its ID and experiment ID.
+ """
+ task = self._tasks.get_one(experiment_id=experiment_id, id=task_id)
+ return Task(**task) if task else None
+
+ def get_tasks(self, **query: dict[str, Any]) -> list[Task]:
+ """
+ Query tasks with arbitrary parameters.
+
+ :param query: Dictionary of query parameters.
+ """
+ tasks = self._tasks.get_all(**query)
+ return [Task(**task) for task in tasks]
+
+ def add_task_output(self, experiment_id: str, task_id: str, task_output: TaskOutput) -> None:
+ """
+ Add the output of a task to the database.
+ """
+ self._tasks.update({"output": task_output.model_dump()}, experiment_id=experiment_id, id=task_id)
+
+ def get_task_output(self, experiment_id: str, task_id: str) -> TaskOutput | None:
+ """
+ Get the output of a task by its ID and experiment ID.
+ """
+ result = self._tasks.get_one(experiment_id=experiment_id, id=task_id)
+ if not result:
+ return None
+
+ task = Task(**result)
+ if not task.output:
+ return None
+
+ return task.output
+
+ def add_task_output_file(self, experiment_id: str, task_id: str, file_name: str, file_data: bytes) -> None:
+ """
+ Add a file output from a task to the file database.
+ """
+ path = f"{experiment_id}/{task_id}/{file_name}"
+ self._file_db_manager.store_file(path, file_data)
+
+ def get_task_output_file(self, experiment_id: str, task_id: str, file_name: str) -> bytes:
+ """
+ Get a file output from a task from the file database.
+ """
+ path = f"{experiment_id}/{task_id}/{file_name}"
+ return self._file_db_manager.get_file(path)
+
+ def stream_task_output_file(
+ self, experiment_id: str, task_id: str, file_name: str, chunk_size: int = 3 * 1024 * 1024
+ ) -> AsyncIterable[bytes]:
+ """
+ Stream a file output from a task from the file database.
+ """
+ path = f"{experiment_id}/{task_id}/{file_name}"
+ return self._file_db_manager.stream_file(path, chunk_size)
+
+ def list_task_output_files(self, experiment_id: str, task_id: str) -> list[str]:
+ """
+ List all file outputs from a task in the file database.
+ """
+ prefix = f"{experiment_id}/{task_id}/"
+ return self._file_db_manager.list_files(prefix)
+
+ def delete_task_output_file(self, experiment_id: str, task_id: str, file_name: str) -> None:
+ """
+ Delete a file output from a task in the file database.
+ """
+ path = f"{experiment_id}/{task_id}/{file_name}"
+ self._file_db_manager.delete_file(path)
+
+ def _set_task_status(self, experiment_id: str, task_id: str, new_status: TaskStatus) -> None:
+ """
+ Update the status of a task.
+ """
+ self._validate_task_exists(experiment_id, task_id)
+
+ update_fields = {"status": new_status.value}
+ if new_status == TaskStatus.RUNNING:
+ update_fields["start_time"] = datetime.now(tz=timezone.utc)
+ elif new_status in [TaskStatus.COMPLETED, TaskStatus.FAILED, TaskStatus.CANCELLED]:
+ update_fields["end_time"] = datetime.now(tz=timezone.utc)
+
+ self._tasks.update(update_fields, experiment_id=experiment_id, id=task_id)
+
+ def _validate_task_exists(self, experiment_id: str, task_id: str) -> None:
+ """
+ Check if a task exists in an experiment.
+ """
+ if not self._tasks.exists(experiment_id=experiment_id, id=task_id):
+ raise EosTaskStateError(f"Task '{task_id}' does not exist in experiment '{experiment_id}'.")
diff --git a/eos/tasks/task_validator.py b/eos/tasks/task_validator.py
new file mode 100644
index 0000000..0e34b97
--- /dev/null
+++ b/eos/tasks/task_validator.py
@@ -0,0 +1,18 @@
+from eos.configuration.entities.task import TaskConfig
+from eos.configuration.entities.task_specification import TaskSpecification
+from eos.configuration.spec_registries.task_specification_registry import TaskSpecificationRegistry
+from eos.tasks.task_input_parameter_validator import TaskInputParameterValidator
+
+
+class TaskValidator:
+ def __init__(self):
+ self.task_spec_registry = TaskSpecificationRegistry()
+
+ def validate(self, task_config: TaskConfig) -> None:
+ task_spec = self.task_spec_registry.get_spec_by_config(task_config)
+ self._validate_parameters(task_config, task_spec)
+
+ @staticmethod
+ def _validate_parameters(task_config: TaskConfig, task_spec: TaskSpecification) -> None:
+ validator = TaskInputParameterValidator(task_config, task_spec)
+ validator.validate_input_parameters()
diff --git a/eos/utils/__init__.py b/eos/utils/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/eos/utils/dict_utils.py b/eos/utils/dict_utils.py
new file mode 100644
index 0000000..50f38b8
--- /dev/null
+++ b/eos/utils/dict_utils.py
@@ -0,0 +1,53 @@
+from typing import Any
+
+import pandas as pd
+
+
+def flatten_dict(d: dict[str, Any], sep: str = ".") -> dict[str, Any]:
+ """
+ Flatten a nested dictionary, concatenating keys with a separator.
+ Works for arbitrary levels of nesting.
+ """
+
+ def _flatten(current: dict[str, Any], parent_key: str = "") -> dict[str, Any]:
+ items = {}
+ for k, v in current.items():
+ new_key = f"{parent_key}{sep}{k}" if parent_key else k
+ if isinstance(v, dict):
+ items.update(_flatten(v, new_key))
+ else:
+ items[new_key] = v
+ return items
+
+ return _flatten(d)
+
+
+def unflatten_dict(d: dict[str, Any], sep: str = ".") -> dict[str, Any]:
+ """
+ Unflatten a dictionary with concatenated keys into a nested dictionary.
+ Works for arbitrary levels of nesting.
+ """
+ unflattened = {}
+ for key, value in d.items():
+ parts = key.split(sep)
+ current = unflattened
+ for part in parts[:-1]:
+ current = current.setdefault(part, {})
+ current[parts[-1]] = value
+ return unflattened
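+
+# Example round trip (illustrative):
+#
+#   >>> flatten_dict({"a": {"b": 1, "c": {"d": 2}}})
+#   {'a.b': 1, 'a.c.d': 2}
+#   >>> unflatten_dict({"a.b": 1, "a.c.d": 2})
+#   {'a': {'b': 1, 'c': {'d': 2}}}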
+
+
+def dicts_to_dfs(dicts: list[dict[str, Any]], sep: str = ".") -> pd.DataFrame:
+ """
+ Convert a list of nested dictionaries to a pandas DataFrame.
+ Each nested dictionary is flattened, and the resulting DataFrame is created.
+ """
+ flattened_dicts = [flatten_dict(d, sep) for d in dicts]
+ return pd.DataFrame(flattened_dicts)
+
+
+def df_to_dicts(df: pd.DataFrame, sep: str = ".") -> list[dict[str, Any]]:
+ """
+ Convert a pandas DataFrame back to a list of nested dictionaries.
+ """
+ return [unflatten_dict(row.dropna().to_dict(), sep) for _, row in df.iterrows()]
diff --git a/eos/utils/file_utils.py b/eos/utils/file_utils.py
new file mode 100644
index 0000000..5e3ad38
--- /dev/null
+++ b/eos/utils/file_utils.py
@@ -0,0 +1,134 @@
+import dataclasses
+import os
+import re
+import tempfile
+import zipfile
+from pathlib import Path
+
+
+@dataclasses.dataclass
+class FileData:
+ filename: str
+ content: bytes
+
+
+@dataclasses.dataclass
+class FolderData:
+ folder_name: str
+ content: bytes
+
+
+def read_file(filename: str) -> FileData:
+ """
+ Reads a file and returns its content along with its name in a FileData object.
+
+ :param filename: The name of the file to be read.
+ :return: FileData object containing the filename and binary buffer.
+ """
+ with Path(filename).open("rb") as file:
+ buffer = file.read()
+ return FileData(Path(filename).name, buffer)
+
+
+def write_file(file_data: FileData) -> None:
+ """
+ Writes the binary buffer from a FileData object to a file.
+
+ :param file_data: The FileData object containing the filename and binary buffer.
+ """
+ with Path(file_data.filename).open("wb") as file:
+ file.write(file_data.content)
+
+
+def find_files_with_pattern(directory: str, pattern: str) -> list:
+ """
+ Recursively search for files under the specified directory whose names match the given regex pattern.
+
+ :param directory: The directory path to search in.
+ :param pattern: The regex pattern to match filenames against.
+ :return: A list of filenames that match the pattern.
+ """
+ matched_files = []
+ regex = re.compile(pattern)
+
+ if not Path(directory).is_dir():
+ raise FileNotFoundError(f"Directory '{directory}' does not exist.")
+
+ for _, _, filenames in os.walk(directory):
+ for filename in filenames:
+ if regex.match(filename):
+ matched_files.append(filename)
+
+ return matched_files
+
+
+def find_highest_numbered_files(file_list: list) -> list:
+ """
+ Find all files with the greatest number following the dash or underscore in their names.
+ All files returned will have the same number.
+ Works with files formatted as "*-NUM.*" or "*_NUM.*".
+
+ :param file_list: List of filenames.
+ :return: List of filenames with the greatest common number.
+ """
+ number_pattern = re.compile(r"[\-_]([0-9]+)\.")
+
+ numbers = {}
+ for file in file_list:
+ match = number_pattern.search(file)
+ if match:
+ number = int(match.group(1))
+ if number in numbers:
+ numbers[number].append(file)
+ else:
+ numbers[number] = [file]
+
+ if not numbers:
+ return []
+
+ max_number = max(numbers.keys())
+
+ return numbers[max_number]
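+
+# Example (illustrative):
+#
+#   >>> find_highest_numbered_files(["run-1.csv", "run-2.csv", "log_2.txt", "notes.md"])
+#   ['run-2.csv', 'log_2.txt']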
+
+
+def read_folder(folder_name: str) -> FolderData:
+ """
+ Zips a folder and reads it as a binary buffer.
+
+ :param folder_name: The name of the folder to be zipped and read.
+ :return: FolderData object containing the folder name and binary buffer of the zipped folder.
+ """
+ with tempfile.TemporaryFile() as temp_zip:
+ with zipfile.ZipFile(temp_zip, "w", zipfile.ZIP_DEFLATED) as zipf:
+ for root, _, files in os.walk(folder_name):
+ for file in files:
+ zipf.write(
+ Path(root) / file,
+ os.path.relpath(Path(root) / file, Path(folder_name).parent),
+ )
+
+ temp_zip.seek(0)
+ buffer = temp_zip.read()
+
+ return FolderData(Path(folder_name).name, buffer)
+
+
+def write_folder(folder_data: FolderData) -> None:
+ """
+ Decompresses the zip binary buffer and recreates the folder with a new name.
+
+ :param folder_data: The FolderData object containing the new folder name and binary buffer.
+ """
+ with tempfile.TemporaryDirectory() as temp_dir:
+ zip_filename = Path(temp_dir) / "temp.zip"
+
+ with Path(zip_filename).open("wb") as file:
+ file.write(folder_data.content)
+
+ with zipfile.ZipFile(zip_filename, "r") as zip_ref:
+ zip_ref.extractall(temp_dir)
+
+ root_folder = next(os.walk(temp_dir))[1][0]
+ extracted_folder_path = Path(temp_dir) / root_folder
+
+ Path(extracted_folder_path).rename(folder_data.folder_name)
diff --git a/eos/utils/ray_utils.py b/eos/utils/ray_utils.py
new file mode 100644
index 0000000..da2031e
--- /dev/null
+++ b/eos/utils/ray_utils.py
@@ -0,0 +1,49 @@
+from typing import Any
+
+import ray
+from ray.actor import ActorHandle
+
+
+class RayActorWrapper:
+ """
+ Wrapper for Ray actors to allow for easy synchronous calls to actor methods.
+ """
+
+ def __init__(self, actor: ActorHandle):
+ self.actor = actor
+
+ def __getattr__(self, name: str) -> Any:
+ if not name.startswith("__"):
+ async_func = getattr(self.actor, name)
+
+ def wrapper(*args, **kwargs) -> Any:
+ return ray.get(async_func.remote(*args, **kwargs))
+
+ return wrapper
+
+        # Normal lookup has already failed when __getattr__ runs; surface that for dunder names
+        raise AttributeError(name)
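+
+    # Example (a sketch; assumes MyActor is a @ray.remote-decorated class):
+    #   wrapped = RayActorWrapper(MyActor.remote())
+    #   result = wrapped.do_work(42)  # blocks on ray.get() instead of returning an ObjectRef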
+
+
+def ray_run(ray_remote_method: callable, *args, **kwargs) -> Any:
+ """
+ A helper function to simplify calling Ray remote functions.
+
+ Args:
+ ray_remote_method: The Ray remote method to be invoked.
+ *args: Arguments to be passed to the remote method.
+ **kwargs: Keyword arguments to be passed to the remote method.
+
+ Returns:
+ The result of the Ray remote method call.
+ """
+ # Invoke the remote method and get the result
+ return ray.get(ray_remote_method.remote(*args, **kwargs))
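+
+
+# Example (a sketch; assumes my_task is a @ray.remote function):
+#   value = ray_run(my_task, 1, 2)  # same as ray.get(my_task.remote(1, 2))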
diff --git a/eos/utils/singleton.py b/eos/utils/singleton.py
new file mode 100644
index 0000000..5d30505
--- /dev/null
+++ b/eos/utils/singleton.py
@@ -0,0 +1,15 @@
+from typing import ClassVar
+
+
+class Singleton(type):
+ _instances: ClassVar = {}
+
+ def __call__(cls, *args, **kwargs):
+ if cls not in cls._instances:
+ cls._instances[cls] = super().__call__(*args, **kwargs)
+ return cls._instances[cls]
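+
+
+# Example (a sketch):
+#   class AppConfig(metaclass=Singleton): ...
+#   assert AppConfig() is AppConfig()  # repeated calls return the same instance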
diff --git a/eos/web_api/__init__.py b/eos/web_api/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/eos/web_api/common/__init__.py b/eos/web_api/common/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/eos/web_api/common/entities.py b/eos/web_api/common/entities.py
new file mode 100644
index 0000000..fe3328e
--- /dev/null
+++ b/eos/web_api/common/entities.py
@@ -0,0 +1,60 @@
+from typing import Any
+
+from pydantic import BaseModel
+
+from eos.campaigns.entities.campaign import CampaignExecutionParameters
+from eos.configuration.entities.task import TaskConfig
+from eos.experiments.entities.experiment import ExperimentExecutionParameters
+
+
+class SubmitTaskRequest(BaseModel):
+ task_config: TaskConfig
+ resource_allocation_priority: int = 1
+ resource_allocation_timeout: int = 180
+
+
+class TaskTypesResponse(BaseModel):
+ task_types: list[str] | str
+
+
+class SubmitExperimentRequest(BaseModel):
+ experiment_id: str
+ experiment_type: str
+ experiment_execution_parameters: ExperimentExecutionParameters
+ dynamic_parameters: dict[str, dict[str, Any]]
+ metadata: dict[str, Any] = {}
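+
+    # Example request body (a sketch; values are illustrative):
+    #   {"experiment_id": "exp_001", "experiment_type": "my_experiment",
+    #    "experiment_execution_parameters": {...},
+    #    "dynamic_parameters": {"task_1": {"param": 1}}, "metadata": {}}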
+
+
+class ExperimentTypes(BaseModel):
+ experiment_types: list[str] | str
+
+
+class ExperimentLoadedStatusesResponse(BaseModel):
+ experiment_loaded_statuses: dict[str, bool]
+
+
+class ExperimentTypesResponse(BaseModel):
+ experiment_types: list[str]
+
+
+class ExperimentDynamicParamsTemplateResponse(BaseModel):
+ dynamic_params_template: str
+
+
+class SubmitCampaignRequest(BaseModel):
+ campaign_id: str
+ experiment_type: str
+ campaign_execution_parameters: CampaignExecutionParameters
+
+
+class LabTypes(BaseModel):
+ lab_types: list[str] | str
+
+
+class LabLoadedStatusesResponse(BaseModel):
+ lab_loaded_statuses: dict[str, bool]
diff --git a/eos/web_api/orchestrator/__init__.py b/eos/web_api/orchestrator/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/eos/web_api/orchestrator/controllers/__init__.py b/eos/web_api/orchestrator/controllers/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/eos/web_api/orchestrator/controllers/campaign_controller.py b/eos/web_api/orchestrator/controllers/campaign_controller.py
new file mode 100644
index 0000000..20312d4
--- /dev/null
+++ b/eos/web_api/orchestrator/controllers/campaign_controller.py
@@ -0,0 +1,33 @@
+from litestar import Controller, Response, get
+from litestar.handlers import post
+from litestar.status_codes import HTTP_200_OK, HTTP_404_NOT_FOUND, HTTP_201_CREATED
+
+from eos.orchestration.orchestrator import Orchestrator
+from eos.web_api.common.entities import SubmitCampaignRequest
+from eos.web_api.public.exception_handling import handle_exceptions
+
+
+class CampaignController(Controller):
+ path = "/campaigns"
+
+ @get("/{campaign_id:str}")
+ @handle_exceptions("Failed to get campaign")
+ async def get_campaign(self, campaign_id: str, orchestrator: Orchestrator) -> Response:
+ campaign = await orchestrator.get_campaign(campaign_id)
+
+ if campaign is None:
+ return Response(content={"error": "Campaign not found"}, status_code=HTTP_404_NOT_FOUND)
+
+ return Response(content=campaign.model_dump_json(), status_code=HTTP_200_OK)
+
+ @post("/submit")
+ @handle_exceptions("Failed to submit campaign")
+ async def submit_campaign(self, data: SubmitCampaignRequest, orchestrator: Orchestrator) -> Response:
+ await orchestrator.submit_campaign(data.campaign_id, data.experiment_type, data.campaign_execution_parameters)
+ return Response(content=None, status_code=HTTP_201_CREATED)
+
+ @post("/{campaign_id:str}/cancel")
+ @handle_exceptions("Failed to cancel campaign")
+ async def cancel_campaign(self, campaign_id: str, orchestrator: Orchestrator) -> Response:
+ await orchestrator.cancel_campaign(campaign_id)
+ return Response(content=None, status_code=HTTP_200_OK)
diff --git a/eos/web_api/orchestrator/controllers/experiment_controller.py b/eos/web_api/orchestrator/controllers/experiment_controller.py
new file mode 100644
index 0000000..b0cca83
--- /dev/null
+++ b/eos/web_api/orchestrator/controllers/experiment_controller.py
@@ -0,0 +1,76 @@
+from litestar import Controller, get, put, Response
+from litestar.handlers import post
+from litestar.status_codes import HTTP_200_OK, HTTP_201_CREATED, HTTP_404_NOT_FOUND
+
+from eos.orchestration.orchestrator import Orchestrator
+from eos.web_api.common.entities import (
+ SubmitExperimentRequest,
+ ExperimentTypesResponse,
+ ExperimentLoadedStatusesResponse,
+ ExperimentTypes,
+)
+from eos.web_api.public.exception_handling import handle_exceptions
+
+
+class ExperimentController(Controller):
+ path = "/experiments"
+
+ @get("/{experiment_id:str}")
+ async def get_experiment(self, experiment_id: str, orchestrator: Orchestrator) -> Response:
+ experiment = await orchestrator.get_experiment(experiment_id)
+
+ if experiment is None:
+ return Response(content={"error": "Experiment not found"}, status_code=HTTP_404_NOT_FOUND)
+
+ return Response(content=experiment.model_dump_json(), status_code=HTTP_200_OK)
+
+ @post("/submit")
+ @handle_exceptions("Failed to submit experiment")
+ async def submit_experiment(self, data: SubmitExperimentRequest, orchestrator: Orchestrator) -> Response:
+ await orchestrator.submit_experiment(
+ data.experiment_id,
+ data.experiment_type,
+ data.experiment_execution_parameters,
+ data.dynamic_parameters,
+ data.metadata,
+ )
+ return Response(content=None, status_code=HTTP_201_CREATED)
+
+ @post("/{experiment_id:str}/cancel")
+ @handle_exceptions("Failed to cancel experiment")
+ async def cancel_experiment(self, experiment_id: str, orchestrator: Orchestrator) -> Response:
+ await orchestrator.cancel_experiment(experiment_id)
+ return Response(content=None, status_code=HTTP_200_OK)
+
+ @put("/update_loaded")
+ @handle_exceptions("Failed to update loaded experiments")
+ async def update_loaded_experiments(self, data: ExperimentTypes, orchestrator: Orchestrator) -> Response:
+ await orchestrator.update_loaded_experiments(set(data.experiment_types))
+ return Response(content=None, status_code=HTTP_200_OK)
+
+ @put("/reload")
+ @handle_exceptions("Failed to reload experiments")
+ async def reload_experiments(self, data: ExperimentTypes, orchestrator: Orchestrator) -> Response:
+ await orchestrator.reload_experiments(set(data.experiment_types))
+ return Response(content=None, status_code=HTTP_200_OK)
+
+ @get("/types")
+ @handle_exceptions("Failed to get experiment types")
+ async def get_experiment_types(self, orchestrator: Orchestrator) -> ExperimentTypesResponse:
+ experiment_types = await orchestrator.get_experiment_types()
+ return ExperimentTypesResponse(experiment_types=experiment_types)
+
+ @get("/loaded_statuses")
+ @handle_exceptions("Failed to get experiment loaded statuses")
+ async def get_experiment_loaded_statuses(self, orchestrator: Orchestrator) -> ExperimentLoadedStatusesResponse:
+ experiment_loaded_statuses = await orchestrator.get_experiment_loaded_statuses()
+ return ExperimentLoadedStatusesResponse(experiment_loaded_statuses=experiment_loaded_statuses)
+
+ @get("/{experiment_type:str}/dynamic_params_template")
+ @handle_exceptions("Failed to get dynamic parameters template")
+ async def get_experiment_dynamic_params_template(
+ self, experiment_type: str, orchestrator: Orchestrator
+ ) -> Response:
+ dynamic_params_template = await orchestrator.get_experiment_dynamic_params_template(experiment_type)
+ return Response(content=dynamic_params_template, status_code=HTTP_200_OK)
diff --git a/eos/web_api/orchestrator/controllers/file_controller.py b/eos/web_api/orchestrator/controllers/file_controller.py
new file mode 100644
index 0000000..48fc445
--- /dev/null
+++ b/eos/web_api/orchestrator/controllers/file_controller.py
@@ -0,0 +1,79 @@
+import io
+import zipfile
+from collections.abc import AsyncIterable
+from pathlib import Path
+
+from litestar import Controller, get
+from litestar.exceptions import HTTPException
+from litestar.response import Stream
+
+from eos.orchestration.orchestrator import Orchestrator
+from eos.web_api.public.exception_handling import handle_exceptions
+
+_CHUNK_SIZE = 3 * 1024 * 1024 # 3MB
+
+
+class FileController(Controller):
+ path = "/files"
+
+ @get("/download/{experiment_id:str}/{task_id:str}/{file_name:str}")
+ @handle_exceptions("Failed to download file")
+ async def download_task_output_file(
+ self, experiment_id: str, task_id: str, file_name: str, orchestrator: Orchestrator
+ ) -> Stream:
+ async def file_stream() -> AsyncIterable:
+ try:
+ async for chunk in orchestrator.stream_task_output_file(
+ experiment_id, task_id, file_name, chunk_size=_CHUNK_SIZE
+ ):
+ yield chunk
+ except Exception as e:
+ raise HTTPException(status_code=500, detail=str(e)) from e
+
+ return Stream(file_stream(), headers={"Content-Disposition": f"attachment; filename={file_name}"})
+
+ @get("/download/{experiment_id:str}/{task_id:str}")
+ @handle_exceptions("Failed to download zipped task output files")
+ async def download_task_output_files_zipped(
+ self, experiment_id: str, task_id: str, orchestrator: Orchestrator
+ ) -> Stream:
+ async def zip_stream() -> AsyncIterable:
+ try:
+ file_list = await orchestrator.list_task_output_files(experiment_id, task_id)
+
+                buffer = io.BytesIO()
+                bytes_streamed = 0
+                with zipfile.ZipFile(buffer, "w", zipfile.ZIP_DEFLATED) as zip_file:
+                    for file_path in file_list:
+                        file_name = Path(file_path).name
+
+                        zip_info = zipfile.ZipInfo(file_name)
+                        zip_info.compress_type = zipfile.ZIP_DEFLATED
+
+                        with zip_file.open(zip_info, mode="w") as file_in_zip:
+                            async for chunk in orchestrator.stream_task_output_file(experiment_id, task_id, file_name):
+                                file_in_zip.write(chunk)
+
+                        # Stream out everything buffered so far. The buffer is intentionally
+                        # not truncated: ZipFile records absolute offsets for the central
+                        # directory, so discarding streamed bytes would corrupt the archive.
+                        if buffer.tell() - bytes_streamed > _CHUNK_SIZE:
+                            buffer.seek(bytes_streamed)
+                            chunk = buffer.read()
+                            bytes_streamed += len(chunk)
+                            yield chunk
+
+                # Drain the remainder, including the central directory written on close
+                buffer.seek(bytes_streamed)
+                while True:
+                    chunk = buffer.read(_CHUNK_SIZE)
+                    if not chunk:
+                        break
+                    yield chunk
+
+ except Exception as e:
+ raise HTTPException(status_code=500, detail=str(e)) from e
+
+ return Stream(
+ zip_stream(), headers={"Content-Disposition": f"attachment; filename={experiment_id}_{task_id}_output.zip"}
+ )
diff --git a/eos/web_api/orchestrator/controllers/lab_controller.py b/eos/web_api/orchestrator/controllers/lab_controller.py
new file mode 100644
index 0000000..a1bc80a
--- /dev/null
+++ b/eos/web_api/orchestrator/controllers/lab_controller.py
@@ -0,0 +1,43 @@
+from litestar import Controller, put, get, Response
+from litestar.status_codes import HTTP_200_OK
+from omegaconf import OmegaConf
+
+from eos.orchestration.orchestrator import Orchestrator
+from eos.web_api.common.entities import LabLoadedStatusesResponse, LabTypes
+from eos.web_api.public.exception_handling import handle_exceptions
+
+
+class LabController(Controller):
+ path = "/labs"
+
+ @get("/devices")
+ @handle_exceptions("Failed to get lab devices")
+ async def get_lab_devices(
+ self, lab_types: list[str] | None, task_type: str | None, orchestrator: Orchestrator
+ ) -> Response:
+ lab_devices = await orchestrator.get_lab_devices(lab_types, task_type)
+
+ # Convert LabDeviceConfig objects to plain dictionaries
+ dict_lab_devices = {}
+ for lab_type, devices in lab_devices.items():
+ dict_lab_devices[lab_type] = {name: OmegaConf.to_object(device) for name, device in devices.items()}
+
+ return Response(content=dict_lab_devices, status_code=HTTP_200_OK)
+
+ @put("/update_loaded")
+ @handle_exceptions("Failed to update loaded labs")
+ async def update_loaded_labs(self, data: LabTypes, orchestrator: Orchestrator) -> Response:
+ await orchestrator.update_loaded_labs(set(data.lab_types))
+ return Response(content=None, status_code=HTTP_200_OK)
+
+ @put("/reload")
+ @handle_exceptions("Failed to reload labs")
+ async def reload_labs(self, data: LabTypes, orchestrator: Orchestrator) -> Response:
+ await orchestrator.reload_labs(set(data.lab_types))
+ return Response(content=None, status_code=HTTP_200_OK)
+
+ @get("/loaded_statuses")
+ @handle_exceptions("Failed to get lab loaded statuses")
+ async def get_lab_loaded_statuses(self, orchestrator: Orchestrator) -> LabLoadedStatusesResponse:
+ lab_loaded_statuses = await orchestrator.get_lab_loaded_statuses()
+ return LabLoadedStatusesResponse(lab_loaded_statuses=lab_loaded_statuses)
diff --git a/eos/web_api/orchestrator/controllers/task_controller.py b/eos/web_api/orchestrator/controllers/task_controller.py
new file mode 100644
index 0000000..023f84d
--- /dev/null
+++ b/eos/web_api/orchestrator/controllers/task_controller.py
@@ -0,0 +1,45 @@
+from litestar import Controller, Response, get
+from litestar.handlers import post
+from litestar.status_codes import HTTP_200_OK, HTTP_201_CREATED
+from omegaconf import OmegaConf
+
+from eos.orchestration.orchestrator import Orchestrator
+from eos.web_api.common.entities import SubmitTaskRequest, TaskTypesResponse
+from eos.web_api.public.exception_handling import handle_exceptions
+
+
+class TaskController(Controller):
+ path = "/tasks"
+
+ @get("/{experiment_id:str}/{task_id:str}")
+ @handle_exceptions("Failed to get task")
+ async def get_task(self, experiment_id: str, task_id: str, orchestrator: Orchestrator) -> Response:
+ task = await orchestrator.get_task(experiment_id, task_id)
+ return Response(content=task.model_dump_json(), status_code=HTTP_200_OK)
+
+ @post("/submit")
+ @handle_exceptions("Failed to submit task")
+ async def submit_task(self, data: SubmitTaskRequest, orchestrator: Orchestrator) -> Response:
+ await orchestrator.submit_task(
+ data.task_config, data.resource_allocation_priority, data.resource_allocation_timeout
+ )
+ return Response(content=None, status_code=HTTP_201_CREATED)
+
+ @post("/{task_id:str}/cancel")
+ @handle_exceptions("Failed to cancel task")
+ async def cancel_task(self, task_id: str, orchestrator: Orchestrator) -> Response:
+ await orchestrator.cancel_task(task_id)
+ return Response(content=None, status_code=HTTP_200_OK)
+
+ @get("/types")
+ @handle_exceptions("Failed to get task types")
+ async def get_task_types(self, orchestrator: Orchestrator) -> TaskTypesResponse:
+ task_types = await orchestrator.get_task_types()
+ return TaskTypesResponse(task_types=task_types)
+
+ @get("/{task_type:str}/spec")
+ @handle_exceptions("Failed to get task spec")
+ async def get_task_spec(self, task_type: str, orchestrator: Orchestrator) -> Response:
+ task_spec = await orchestrator.get_task_spec(task_type)
+ task_spec = OmegaConf.to_object(task_spec)
+ return Response(content=task_spec, status_code=HTTP_200_OK)
diff --git a/eos/web_api/orchestrator/exception_handling.py b/eos/web_api/orchestrator/exception_handling.py
new file mode 100644
index 0000000..b7cdd08
--- /dev/null
+++ b/eos/web_api/orchestrator/exception_handling.py
@@ -0,0 +1,41 @@
+from functools import wraps
+
+from litestar import Response, status_codes, Request
+
+from eos.logging.logger import log
+
+
+class AppError(Exception):
+ def __init__(
+ self, message: str, status_code: int = status_codes.HTTP_500_INTERNAL_SERVER_ERROR, expose_message: bool = False
+    ) -> None:
+        super().__init__(message)  # so str(exc) carries the message when logged
+        self.message = message
+ self.status_code = status_code
+ self.expose_message = expose_message
+
+
+def handle_exceptions(error_msg: str) -> callable:
+ def decorator(func: callable) -> callable:
+ @wraps(func)
+ async def wrapper(*args, **kwargs) -> Response:
+ try:
+ return await func(*args, **kwargs)
+ except Exception as e:
+ raise AppError(f"{error_msg}: {e!s}", expose_message=True) from e
+
+ return wrapper
+
+ return decorator
+
+
+def global_exception_handler(request: Request, exc: Exception) -> Response:
+ log.error(f"Web API error: {exc!s}")
+ if isinstance(exc, AppError):
+ content = {"message": exc.message} if exc.expose_message else {"message": "An error occurred"}
+ return Response(content=content, status_code=exc.status_code)
+
+ # For any other exception, return a generic error message
+ return Response(
+ content={"message": "An unexpected error occurred"}, status_code=status_codes.HTTP_500_INTERNAL_SERVER_ERROR
+ )
diff --git a/eos/web_api/public/__init__.py b/eos/web_api/public/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/eos/web_api/public/controllers/__init__.py b/eos/web_api/public/controllers/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/eos/web_api/public/controllers/campaign_controller.py b/eos/web_api/public/controllers/campaign_controller.py
new file mode 100644
index 0000000..c1bf2ab
--- /dev/null
+++ b/eos/web_api/public/controllers/campaign_controller.py
@@ -0,0 +1,47 @@
+from litestar import Controller, Response, get
+from litestar.datastructures import State
+from litestar.exceptions import HTTPException
+from litestar.handlers import post
+from litestar.status_codes import HTTP_200_OK, HTTP_404_NOT_FOUND, HTTP_201_CREATED
+
+from eos.web_api.common.entities import SubmitCampaignRequest
+from eos.web_api.public.exception_handling import handle_exceptions
+
+
+class CampaignController(Controller):
+ path = "/campaigns"
+
+ @get("/{campaign_id:str}")
+ @handle_exceptions("Failed to get campaign")
+ async def get_campaign(self, campaign_id: str, state: State) -> Response:
+ orchestrator_client = state.orchestrator_client
+ async with orchestrator_client.get(f"/api/campaigns/{campaign_id}") as response:
+ if response.status == HTTP_200_OK:
+ campaign = await response.json()
+ return Response(content=campaign, status_code=HTTP_200_OK)
+ if response.status == HTTP_404_NOT_FOUND:
+ return Response(content={"error": "Campaign not found"}, status_code=HTTP_404_NOT_FOUND)
+
+ raise HTTPException(status_code=response.status, detail="Error fetching campaign")
+
+ @post("/submit")
+ @handle_exceptions("Failed to submit campaign")
+ async def submit_campaign(self, data: SubmitCampaignRequest, state: State) -> Response:
+ orchestrator_client = state.orchestrator_client
+ async with orchestrator_client.post("/api/campaigns/submit", json=data.model_dump()) as response:
+ if response.status == HTTP_201_CREATED:
+ return Response(content=None, status_code=HTTP_201_CREATED)
+
+ raise HTTPException(status_code=response.status, detail="Error submitting campaign")
+
+ @post("/{campaign_id:str}/cancel")
+ @handle_exceptions("Failed to cancel campaign")
+ async def cancel_campaign(self, campaign_id: str, state: State) -> Response:
+ orchestrator_client = state.orchestrator_client
+ async with orchestrator_client.post(f"/api/campaigns/{campaign_id}/cancel") as response:
+ if response.status == HTTP_200_OK:
+ return Response(content=None, status_code=HTTP_200_OK)
+ if response.status == HTTP_404_NOT_FOUND:
+ return Response(content={"error": "Campaign not found"}, status_code=HTTP_404_NOT_FOUND)
+
+ raise HTTPException(status_code=response.status, detail="Error cancelling campaign")
diff --git a/eos/web_api/public/controllers/experiment_controller.py b/eos/web_api/public/controllers/experiment_controller.py
new file mode 100644
index 0000000..3049040
--- /dev/null
+++ b/eos/web_api/public/controllers/experiment_controller.py
@@ -0,0 +1,117 @@
+from litestar import Controller, get, put, Response
+from litestar.datastructures import State
+from litestar.exceptions import HTTPException
+from litestar.handlers import post
+from litestar.status_codes import HTTP_200_OK, HTTP_201_CREATED, HTTP_404_NOT_FOUND
+
+from eos.web_api.common.entities import (
+ SubmitExperimentRequest,
+ ExperimentTypesResponse,
+ ExperimentLoadedStatusesResponse,
+ ExperimentTypes,
+)
+from eos.web_api.public.exception_handling import handle_exceptions
+
+
+class ExperimentController(Controller):
+ path = "/experiments"
+
+ @get("/{experiment_id:str}")
+ async def get_experiment(self, experiment_id: str, state: State) -> Response:
+ orchestrator_client = state.orchestrator_client
+ async with orchestrator_client.get(f"/api/experiments/{experiment_id}") as response:
+ if response.status == HTTP_200_OK:
+ experiment = await response.json()
+ return Response(content=experiment, status_code=HTTP_200_OK)
+ if response.status == HTTP_404_NOT_FOUND:
+ return Response(content={"error": "Experiment not found"}, status_code=HTTP_404_NOT_FOUND)
+
+ raise HTTPException(status_code=response.status, detail="Error fetching experiment")
+
+ @post("/submit")
+ @handle_exceptions("Failed to submit experiment")
+ async def submit_experiment(self, data: SubmitExperimentRequest, state: State) -> Response:
+ orchestrator_client = state.orchestrator_client
+ async with orchestrator_client.post("/api/experiments/submit", json=data.model_dump()) as response:
+ if response.status == HTTP_201_CREATED:
+ return Response(content=None, status_code=HTTP_201_CREATED)
+
+ raise HTTPException(status_code=response.status, detail="Error submitting experiment")
+
+ @post("/{experiment_id:str}/cancel")
+ @handle_exceptions("Failed to cancel experiment")
+ async def cancel_experiment(self, experiment_id: str, state: State) -> Response:
+ orchestrator_client = state.orchestrator_client
+ async with orchestrator_client.post(f"/api/experiments/{experiment_id}/cancel") as response:
+ if response.status == HTTP_200_OK:
+ return Response(content={"message": "Experiment cancelled successfully"}, status_code=HTTP_200_OK)
+ if response.status == HTTP_404_NOT_FOUND:
+ return Response(content={"error": "Experiment not found"}, status_code=HTTP_404_NOT_FOUND)
+
+ raise HTTPException(status_code=response.status, detail="Error cancelling experiment")
+
+ @put("/update_loaded")
+ @handle_exceptions("Failed to update loaded experiments")
+ async def update_loaded_experiments(self, data: ExperimentTypes, state: State) -> Response:
+ orchestrator_client = state.orchestrator_client
+ if isinstance(data.experiment_types, str):
+ if data.experiment_types in ["", "[]"]:
+ data.experiment_types = []
+ else:
+ data.experiment_types = [data.experiment_types]
+ async with orchestrator_client.put(
+ "/api/experiments/update_loaded", json={"experiment_types": data.experiment_types}
+ ) as response:
+ if response.status == HTTP_200_OK:
+ return Response(content={"message": "Experiments updated successfully"}, status_code=HTTP_200_OK)
+
+ raise HTTPException(status_code=response.status, detail="Error updating loaded experiments")
+
+ @put("/reload")
+ @handle_exceptions("Failed to reload experiments")
+ async def reload_experiments(self, data: ExperimentTypes, state: State) -> Response:
+ orchestrator_client = state.orchestrator_client
+ if isinstance(data.experiment_types, str):
+ if data.experiment_types in ["", "[]"]:
+ data.experiment_types = []
+ else:
+ data.experiment_types = [data.experiment_types]
+ async with orchestrator_client.put(
+ "/api/experiments/reload", json={"experiment_types": data.experiment_types}
+ ) as response:
+ if response.status == HTTP_200_OK:
+ return Response(content={"message": "Experiments reloaded successfully"}, status_code=HTTP_200_OK)
+
+ raise HTTPException(status_code=response.status, detail="Error reloading experiments")
+
+ @get("/types")
+ @handle_exceptions("Failed to get experiment types")
+ async def get_experiment_types(self, state: State) -> ExperimentTypesResponse:
+ orchestrator_client = state.orchestrator_client
+ async with orchestrator_client.get("/api/experiments/types") as response:
+ if response.status == HTTP_200_OK:
+ return ExperimentTypesResponse(**await response.json())
+
+ raise HTTPException(status_code=response.status, detail="Error fetching experiment types")
+
+ @get("/loaded_statuses")
+ @handle_exceptions("Failed to get experiment loaded statuses")
+ async def get_experiment_loaded_statuses(self, state: State) -> ExperimentLoadedStatusesResponse:
+ orchestrator_client = state.orchestrator_client
+ async with orchestrator_client.get("/api/experiments/loaded_statuses") as response:
+ if response.status == HTTP_200_OK:
+ return ExperimentLoadedStatusesResponse(**await response.json())
+
+ raise HTTPException(status_code=response.status, detail="Error fetching experiment loaded statuses")
+
+ @get("/{experiment_type:str}/dynamic_params_template")
+ @handle_exceptions("Failed to get dynamic parameters template")
+ async def get_experiment_dynamic_params_template(self, experiment_type: str, state: State) -> Response:
+ orchestrator_client = state.orchestrator_client
+ async with orchestrator_client.get(f"/api/experiments/{experiment_type}/dynamic_params_template") as response:
+ if response.status == HTTP_200_OK:
+ dynamic_params_template = await response.json()
+ return Response(content=dynamic_params_template, status_code=HTTP_200_OK)
+
+ raise HTTPException(status_code=response.status, detail="Error fetching dynamic parameters template")
diff --git a/eos/web_api/public/controllers/file_controller.py b/eos/web_api/public/controllers/file_controller.py
new file mode 100644
index 0000000..c226b11
--- /dev/null
+++ b/eos/web_api/public/controllers/file_controller.py
@@ -0,0 +1,49 @@
+from collections.abc import AsyncIterable
+
+from litestar import Controller, get
+from litestar.datastructures import State
+from litestar.exceptions import HTTPException
+from litestar.response import Stream
+from litestar.status_codes import HTTP_200_OK
+
+from eos.web_api.public.exception_handling import handle_exceptions
+
+
+class FileController(Controller):
+ path = "/files"
+
+ @get("/download/{experiment_id:str}/{task_id:str}/{file_name:str}")
+ @handle_exceptions("Failed to download file")
+ async def download_task_output_file(self, experiment_id: str, task_id: str, file_name: str, state: State) -> Stream:
+ orchestrator_client = state.orchestrator_client
+
+ async def file_stream() -> AsyncIterable:
+ async with orchestrator_client.get(
+ f"/api/files/download/{experiment_id}/{task_id}/{file_name}", chunked=True
+ ) as response:
+ if response.status == HTTP_200_OK:
+ async for chunk in response.content.iter_any():
+ yield chunk
+ else:
+ raise HTTPException(status_code=response.status, detail="Error downloading file")
+
+ return Stream(file_stream(), headers={"Content-Disposition": f"attachment; filename={file_name}"})
+
+ @get("/download/{experiment_id:str}/{task_id:str}")
+ @handle_exceptions("Failed to download zipped task output files")
+ async def download_task_output_files_zipped(self, experiment_id: str, task_id: str, state: State) -> Stream:
+ orchestrator_client = state.orchestrator_client
+
+ async def zip_stream() -> AsyncIterable:
+ async with orchestrator_client.get(
+ f"/api/files/download/{experiment_id}/{task_id}", chunked=True
+ ) as response:
+ if response.status == HTTP_200_OK:
+ async for chunk in response.content.iter_any():
+ yield chunk
+ else:
+ raise HTTPException(status_code=response.status, detail="Error downloading zipped files")
+
+ return Stream(
+ zip_stream(), headers={"Content-Disposition": f"attachment; filename={experiment_id}_{task_id}_output.zip"}
+ )
diff --git a/eos/web_api/public/controllers/lab_controller.py b/eos/web_api/public/controllers/lab_controller.py
new file mode 100644
index 0000000..ce3cb22
--- /dev/null
+++ b/eos/web_api/public/controllers/lab_controller.py
@@ -0,0 +1,73 @@
+from litestar import Controller, put, get, Response
+from litestar.datastructures import State
+from litestar.exceptions import HTTPException
+from litestar.status_codes import HTTP_200_OK
+
+from eos.web_api.common.entities import LabLoadedStatusesResponse, LabTypes
+from eos.web_api.public.exception_handling import handle_exceptions
+
+
+class LabController(Controller):
+ path = "/labs"
+
+ @get("/devices")
+ @handle_exceptions("Failed to get lab devices")
+ async def get_lab_devices(self, lab_types: list[str] | None, task_type: str | None, state: State) -> Response:
+ orchestrator_client = state.orchestrator_client
+
+ params = {}
+ if lab_types:
+ params["lab_types"] = ",".join(lab_types)
+ if task_type:
+ params["task_type"] = task_type
+
+ async with orchestrator_client.get("/api/labs/devices", params=params) as response:
+ if response.status == HTTP_200_OK:
+ lab_devices = await response.json()
+ return Response(content=lab_devices, status_code=HTTP_200_OK)
+
+ raise HTTPException(status_code=response.status, detail="Error fetching lab devices")
+
+ @put("/update_loaded")
+ @handle_exceptions("Failed to update loaded labs")
+ async def update_loaded_labs(self, data: LabTypes, state: State) -> Response:
+ orchestrator_client = state.orchestrator_client
+
+ if isinstance(data.lab_types, str):
+ if data.lab_types in ["", "[]"]:
+ data.lab_types = []
+ else:
+ data.lab_types = [data.lab_types]
+
+ async with orchestrator_client.put("/api/labs/update_loaded", json={"lab_types": data.lab_types}) as response:
+ if response.status == HTTP_200_OK:
+ return Response(content={"message": "Labs updated successfully"}, status_code=HTTP_200_OK)
+
+ raise HTTPException(status_code=response.status, detail="Error updating loaded labs")
+
+ @put("/reload")
+ @handle_exceptions("Failed to reload labs")
+ async def reload_labs(self, data: LabTypes, state: State) -> Response:
+ orchestrator_client = state.orchestrator_client
+
+ if isinstance(data.lab_types, str):
+ if data.lab_types in ["", "[]"]:
+ data.lab_types = []
+ else:
+ data.lab_types = [data.lab_types]
+
+ async with orchestrator_client.put("/api/labs/reload", json={"lab_types": data.lab_types}) as response:
+ if response.status == HTTP_200_OK:
+ return Response(content={"message": "Labs reloaded successfully"}, status_code=HTTP_200_OK)
+
+ raise HTTPException(status_code=response.status, detail="Error reloading labs")
+
+ @get("/loaded_statuses")
+ @handle_exceptions("Failed to get lab loaded statuses")
+ async def get_lab_loaded_statuses(self, state: State) -> LabLoadedStatusesResponse:
+ orchestrator_client = state.orchestrator_client
+ async with orchestrator_client.get("/api/labs/loaded_statuses") as response:
+ if response.status == HTTP_200_OK:
+ return LabLoadedStatusesResponse(**await response.json())
+
+ raise HTTPException(status_code=response.status, detail="Error fetching lab loaded statuses")
diff --git a/eos/web_api/public/controllers/task_controller.py b/eos/web_api/public/controllers/task_controller.py
new file mode 100644
index 0000000..8fab1e3
--- /dev/null
+++ b/eos/web_api/public/controllers/task_controller.py
@@ -0,0 +1,71 @@
+from litestar import Controller, Response, get
+from litestar.datastructures import State
+from litestar.exceptions import HTTPException
+from litestar.handlers import post
+from litestar.status_codes import HTTP_200_OK, HTTP_201_CREATED, HTTP_404_NOT_FOUND
+
+from eos.web_api.common.entities import SubmitTaskRequest, TaskTypesResponse
+from eos.web_api.public.exception_handling import handle_exceptions
+
+
+class TaskController(Controller):
+ path = "/tasks"
+
+ @get("/{experiment_id:str}/{task_id:str}")
+ @handle_exceptions("Failed to get task")
+ async def get_task(self, experiment_id: str, task_id: str, state: State) -> Response:
+ orchestrator_client = state.orchestrator_client
+ async with orchestrator_client.get(f"/api/tasks/{experiment_id}/{task_id}") as response:
+ if response.status == HTTP_200_OK:
+ task = await response.json()
+ return Response(content=task, status_code=HTTP_200_OK)
+ if response.status == HTTP_404_NOT_FOUND:
+ return Response(content={"error": "Task not found"}, status_code=HTTP_404_NOT_FOUND)
+
+ raise HTTPException(status_code=response.status, detail="Error fetching task")
+
+ @post("/submit")
+ @handle_exceptions("Failed to submit task")
+ async def submit_task(self, data: SubmitTaskRequest, state: State) -> Response:
+ orchestrator_client = state.orchestrator_client
+ async with orchestrator_client.post("/api/tasks/submit", json=data.model_dump()) as response:
+ if response.status == HTTP_201_CREATED:
+ result = await response.json()
+ return Response(content=result, status_code=HTTP_201_CREATED)
+
+ raise HTTPException(status_code=response.status, detail="Error submitting task")
+
+ @post("/{task_id:str}/cancel")
+ @handle_exceptions("Failed to cancel task")
+ async def cancel_task(self, task_id: str, state: State) -> Response:
+ orchestrator_client = state.orchestrator_client
+ async with orchestrator_client.post(f"/api/tasks/{task_id}/cancel") as response:
+ if response.status == HTTP_200_OK:
+ return Response(content={"message": "Task cancelled successfully"}, status_code=HTTP_200_OK)
+ if response.status == HTTP_404_NOT_FOUND:
+ return Response(content={"error": "Task not found"}, status_code=HTTP_404_NOT_FOUND)
+
+ raise HTTPException(status_code=response.status, detail="Error cancelling task")
+
+ @get("/types")
+ @handle_exceptions("Failed to get task types")
+ async def get_task_types(self, state: State) -> TaskTypesResponse:
+ orchestrator_client = state.orchestrator_client
+ async with orchestrator_client.get("/api/tasks/types") as response:
+ if response.status == HTTP_200_OK:
+ return TaskTypesResponse(**await response.json())
+
+ raise HTTPException(status_code=response.status, detail="Error fetching task types")
+
+ @get("/{task_type:str}/spec")
+ @handle_exceptions("Failed to get task spec")
+ async def get_task_spec(self, task_type: str, state: State) -> Response:
+ orchestrator_client = state.orchestrator_client
+ async with orchestrator_client.get(f"/api/tasks/{task_type}/spec") as response:
+ if response.status == HTTP_200_OK:
+ task_spec = await response.json()
+ return Response(content=task_spec, status_code=HTTP_200_OK)
+ if response.status == HTTP_404_NOT_FOUND:
+ return Response(content={"error": "Task specification not found"}, status_code=HTTP_404_NOT_FOUND)
+
+ raise HTTPException(status_code=response.status, detail="Error fetching task specification")
diff --git a/eos/web_api/public/exception_handling.py b/eos/web_api/public/exception_handling.py
new file mode 100644
index 0000000..a727d2e
--- /dev/null
+++ b/eos/web_api/public/exception_handling.py
@@ -0,0 +1,41 @@
+from functools import wraps
+
+from litestar import Response, status_codes, Request
+
+from eos.logging.logger import log
+
+
+class AppError(Exception):
+ def __init__(
+ self, message: str, status_code: int = status_codes.HTTP_500_INTERNAL_SERVER_ERROR, expose_message: bool = False
+    ) -> None:
+        super().__init__(message)  # so str(exc) carries the message when logged
+        self.message = message
+ self.status_code = status_code
+ self.expose_message = expose_message
+
+
+def handle_exceptions(error_msg: str) -> callable:
+ def decorator(func: callable) -> callable:
+ @wraps(func)
+ async def wrapper(*args, **kwargs) -> Response:
+ try:
+ return await func(*args, **kwargs)
+ except Exception as e:
+ raise AppError(f"{error_msg}: {e!s}", expose_message=True) from e
+
+ return wrapper
+
+ return decorator
+
+
+def global_exception_handler(request: Request, exc: Exception) -> Response:
+ log.error(f"Error: {exc!s}")
+ if isinstance(exc, AppError):
+ content = {"message": exc.message} if exc.expose_message else {"message": "An error occurred"}
+ return Response(content=content, status_code=exc.status_code)
+
+ # For any other exception, return a generic error message
+ return Response(
+ content={"message": "An unexpected error occurred"}, status_code=status_codes.HTTP_500_INTERNAL_SERVER_ERROR
+ )
diff --git a/eos/web_api/public/server.py b/eos/web_api/public/server.py
new file mode 100644
index 0000000..2e3e848
--- /dev/null
+++ b/eos/web_api/public/server.py
@@ -0,0 +1,53 @@
+import os
+from collections.abc import AsyncGenerator
+from contextlib import asynccontextmanager
+
+import aiohttp
+from litestar import Litestar, Router
+from litestar.logging import LoggingConfig
+from litestar.openapi import OpenAPIConfig
+
+from eos.web_api.public.controllers.campaign_controller import CampaignController
+from eos.web_api.public.controllers.experiment_controller import ExperimentController
+from eos.web_api.public.controllers.file_controller import FileController
+from eos.web_api.public.controllers.lab_controller import LabController
+from eos.web_api.public.controllers.task_controller import TaskController
+from eos.web_api.public.exception_handling import global_exception_handler
+
+orchestrator_host = os.getenv("EOS_ORCHESTRATOR_HOST", "localhost")
+orchestrator_port = os.getenv("EOS_ORCHESTRATOR_PORT", "8070")
+
+
+@asynccontextmanager
+async def orchestrator_client(app: Litestar) -> AsyncGenerator[None, None]:
+    client = getattr(app.state, "orchestrator_client", None)
+ if client is None:
+ client = aiohttp.ClientSession(base_url=f"http://{orchestrator_host}:{orchestrator_port}")
+ app.state.orchestrator_client = client
+ try:
+ yield
+ finally:
+ await client.close()
+
+
+api_router = Router(
+ path="/api",
+ route_handlers=[TaskController, ExperimentController, CampaignController, LabController, FileController],
+ exception_handlers={Exception: global_exception_handler},
+)
+
+logging_config = LoggingConfig(configure_root_logger=False)
+app = Litestar(
+ route_handlers=[api_router],
+ logging_config=logging_config,
+ lifespan=[orchestrator_client],
+ openapi_config=OpenAPIConfig(
+ title="EOS REST API",
+ description="Documentation for the EOS REST API",
+ version="0.5.0",
+ path="/docs",
+ ),
+)
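+
+# Run with any ASGI server, e.g. (host/port illustrative):
+#   uvicorn eos.web_api.public.server:app --host 0.0.0.0 --port 8000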
diff --git a/pdm.lock b/pdm.lock
new file mode 100644
index 0000000..0d6439e
--- /dev/null
+++ b/pdm.lock
@@ -0,0 +1,3701 @@
+# This file is @generated by PDM.
+# It is not intended for manual editing.
+
+[metadata]
+groups = ["default", "dev", "docs"]
+strategy = ["inherit_metadata"]
+lock_version = "4.5.0"
+content_hash = "sha256:8c7e0b866c8de9954f521425fe4a268163c441bfce9aea93f717e5393ae11116"
+
+[[metadata.targets]]
+requires_python = ">=3.10"
+
+[[package]]
+name = "accessible-pygments"
+version = "0.0.5"
+requires_python = ">=3.9"
+summary = "A collection of accessible pygments styles"
+groups = ["docs"]
+dependencies = [
+ "pygments>=1.5",
+]
+files = [
+ {file = "accessible_pygments-0.0.5-py3-none-any.whl", hash = "sha256:88ae3211e68a1d0b011504b2ffc1691feafce124b845bd072ab6f9f66f34d4b7"},
+ {file = "accessible_pygments-0.0.5.tar.gz", hash = "sha256:40918d3e6a2b619ad424cb91e556bd3bd8865443d9f22f1dcdf79e33c8046872"},
+]
+
+[[package]]
+name = "aiohappyeyeballs"
+version = "2.3.5"
+requires_python = ">=3.8"
+summary = "Happy Eyeballs for asyncio"
+groups = ["default"]
+files = [
+ {file = "aiohappyeyeballs-2.3.5-py3-none-any.whl", hash = "sha256:4d6dea59215537dbc746e93e779caea8178c866856a721c9c660d7a5a7b8be03"},
+ {file = "aiohappyeyeballs-2.3.5.tar.gz", hash = "sha256:6fa48b9f1317254f122a07a131a86b71ca6946ca989ce6326fff54a99a920105"},
+]
+
+[[package]]
+name = "aiohttp"
+version = "3.10.3"
+requires_python = ">=3.8"
+summary = "Async http client/server framework (asyncio)"
+groups = ["default"]
+dependencies = [
+ "aiohappyeyeballs>=2.3.0",
+ "aiosignal>=1.1.2",
+ "async-timeout<5.0,>=4.0; python_version < \"3.11\"",
+ "attrs>=17.3.0",
+ "frozenlist>=1.1.1",
+ "multidict<7.0,>=4.5",
+ "yarl<2.0,>=1.0",
+]
+files = [
+ {file = "aiohttp-3.10.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cc36cbdedf6f259371dbbbcaae5bb0e95b879bc501668ab6306af867577eb5db"},
+ {file = "aiohttp-3.10.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:85466b5a695c2a7db13eb2c200af552d13e6a9313d7fa92e4ffe04a2c0ea74c1"},
+ {file = "aiohttp-3.10.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:71bb1d97bfe7e6726267cea169fdf5df7658831bb68ec02c9c6b9f3511e108bb"},
+ {file = "aiohttp-3.10.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:baec1eb274f78b2de54471fc4c69ecbea4275965eab4b556ef7a7698dee18bf2"},
+ {file = "aiohttp-3.10.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:13031e7ec1188274bad243255c328cc3019e36a5a907978501256000d57a7201"},
+ {file = "aiohttp-3.10.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2bbc55a964b8eecb341e492ae91c3bd0848324d313e1e71a27e3d96e6ee7e8e8"},
+ {file = "aiohttp-3.10.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8cc0564b286b625e673a2615ede60a1704d0cbbf1b24604e28c31ed37dc62aa"},
+ {file = "aiohttp-3.10.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f817a54059a4cfbc385a7f51696359c642088710e731e8df80d0607193ed2b73"},
+ {file = "aiohttp-3.10.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:8542c9e5bcb2bd3115acdf5adc41cda394e7360916197805e7e32b93d821ef93"},
+ {file = "aiohttp-3.10.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:671efce3a4a0281060edf9a07a2f7e6230dca3a1cbc61d110eee7753d28405f7"},
+ {file = "aiohttp-3.10.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:0974f3b5b0132edcec92c3306f858ad4356a63d26b18021d859c9927616ebf27"},
+ {file = "aiohttp-3.10.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:44bb159b55926b57812dca1b21c34528e800963ffe130d08b049b2d6b994ada7"},
+ {file = "aiohttp-3.10.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:6ae9ae382d1c9617a91647575255ad55a48bfdde34cc2185dd558ce476bf16e9"},
+ {file = "aiohttp-3.10.3-cp310-cp310-win32.whl", hash = "sha256:aed12a54d4e1ee647376fa541e1b7621505001f9f939debf51397b9329fd88b9"},
+ {file = "aiohttp-3.10.3-cp310-cp310-win_amd64.whl", hash = "sha256:b51aef59370baf7444de1572f7830f59ddbabd04e5292fa4218d02f085f8d299"},
+ {file = "aiohttp-3.10.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e021c4c778644e8cdc09487d65564265e6b149896a17d7c0f52e9a088cc44e1b"},
+ {file = "aiohttp-3.10.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:24fade6dae446b183e2410a8628b80df9b7a42205c6bfc2eff783cbeedc224a2"},
+ {file = "aiohttp-3.10.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bc8e9f15939dacb0e1f2d15f9c41b786051c10472c7a926f5771e99b49a5957f"},
+ {file = "aiohttp-3.10.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5a9ec959b5381271c8ec9310aae1713b2aec29efa32e232e5ef7dcca0df0279"},
+ {file = "aiohttp-3.10.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2a5d0ea8a6467b15d53b00c4e8ea8811e47c3cc1bdbc62b1aceb3076403d551f"},
+ {file = "aiohttp-3.10.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c9ed607dbbdd0d4d39b597e5bf6b0d40d844dfb0ac6a123ed79042ef08c1f87e"},
+ {file = "aiohttp-3.10.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3e66d5b506832e56add66af88c288c1d5ba0c38b535a1a59e436b300b57b23e"},
+ {file = "aiohttp-3.10.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fda91ad797e4914cca0afa8b6cccd5d2b3569ccc88731be202f6adce39503189"},
+ {file = "aiohttp-3.10.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:61ccb867b2f2f53df6598eb2a93329b5eee0b00646ee79ea67d68844747a418e"},
+ {file = "aiohttp-3.10.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6d881353264e6156f215b3cb778c9ac3184f5465c2ece5e6fce82e68946868ef"},
+ {file = "aiohttp-3.10.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:b031ce229114825f49cec4434fa844ccb5225e266c3e146cb4bdd025a6da52f1"},
+ {file = "aiohttp-3.10.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:5337cc742a03f9e3213b097abff8781f79de7190bbfaa987bd2b7ceb5bb0bdec"},
+ {file = "aiohttp-3.10.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ab3361159fd3dcd0e48bbe804006d5cfb074b382666e6c064112056eb234f1a9"},
+ {file = "aiohttp-3.10.3-cp311-cp311-win32.whl", hash = "sha256:05d66203a530209cbe40f102ebaac0b2214aba2a33c075d0bf825987c36f1f0b"},
+ {file = "aiohttp-3.10.3-cp311-cp311-win_amd64.whl", hash = "sha256:70b4a4984a70a2322b70e088d654528129783ac1ebbf7dd76627b3bd22db2f17"},
+ {file = "aiohttp-3.10.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:166de65e2e4e63357cfa8417cf952a519ac42f1654cb2d43ed76899e2319b1ee"},
+ {file = "aiohttp-3.10.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7084876352ba3833d5d214e02b32d794e3fd9cf21fdba99cff5acabeb90d9806"},
+ {file = "aiohttp-3.10.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d98c604c93403288591d7d6d7d6cc8a63459168f8846aeffd5b3a7f3b3e5e09"},
+ {file = "aiohttp-3.10.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d73b073a25a0bb8bf014345374fe2d0f63681ab5da4c22f9d2025ca3e3ea54fc"},
+ {file = "aiohttp-3.10.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8da6b48c20ce78f5721068f383e0e113dde034e868f1b2f5ee7cb1e95f91db57"},
+ {file = "aiohttp-3.10.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3a9dcdccf50284b1b0dc72bc57e5bbd3cc9bf019060dfa0668f63241ccc16aa7"},
+ {file = "aiohttp-3.10.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56fb94bae2be58f68d000d046172d8b8e6b1b571eb02ceee5535e9633dcd559c"},
+ {file = "aiohttp-3.10.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bf75716377aad2c718cdf66451c5cf02042085d84522aec1f9246d3e4b8641a6"},
+ {file = "aiohttp-3.10.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6c51ed03e19c885c8e91f574e4bbe7381793f56f93229731597e4a499ffef2a5"},
+ {file = "aiohttp-3.10.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b84857b66fa6510a163bb083c1199d1ee091a40163cfcbbd0642495fed096204"},
+ {file = "aiohttp-3.10.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c124b9206b1befe0491f48185fd30a0dd51b0f4e0e7e43ac1236066215aff272"},
+ {file = "aiohttp-3.10.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:3461d9294941937f07bbbaa6227ba799bc71cc3b22c40222568dc1cca5118f68"},
+ {file = "aiohttp-3.10.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:08bd0754d257b2db27d6bab208c74601df6f21bfe4cb2ec7b258ba691aac64b3"},
+ {file = "aiohttp-3.10.3-cp312-cp312-win32.whl", hash = "sha256:7f9159ae530297f61a00116771e57516f89a3de6ba33f314402e41560872b50a"},
+ {file = "aiohttp-3.10.3-cp312-cp312-win_amd64.whl", hash = "sha256:e1128c5d3a466279cb23c4aa32a0f6cb0e7d2961e74e9e421f90e74f75ec1edf"},
+ {file = "aiohttp-3.10.3.tar.gz", hash = "sha256:21650e7032cc2d31fc23d353d7123e771354f2a3d5b05a5647fc30fea214e696"},
+]
+
+[[package]]
+name = "aiohttp-cors"
+version = "0.7.0"
+summary = "CORS support for aiohttp"
+groups = ["default"]
+dependencies = [
+ "aiohttp>=1.1",
+ "typing; python_version < \"3.5\"",
+]
+files = [
+ {file = "aiohttp-cors-0.7.0.tar.gz", hash = "sha256:4d39c6d7100fd9764ed1caf8cebf0eb01bf5e3f24e2e073fda6234bc48b19f5d"},
+ {file = "aiohttp_cors-0.7.0-py3-none-any.whl", hash = "sha256:0451ba59fdf6909d0e2cd21e4c0a43752bc0703d33fc78ae94d9d9321710193e"},
+]
+
+[[package]]
+name = "aiosignal"
+version = "1.3.1"
+requires_python = ">=3.7"
+summary = "aiosignal: a list of registered asynchronous callbacks"
+groups = ["default"]
+dependencies = [
+ "frozenlist>=1.1.0",
+]
+files = [
+ {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"},
+ {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"},
+]
+
+[[package]]
+name = "alabaster"
+version = "1.0.0"
+requires_python = ">=3.10"
+summary = "A light, configurable Sphinx theme"
+groups = ["docs"]
+files = [
+ {file = "alabaster-1.0.0-py3-none-any.whl", hash = "sha256:fc6786402dc3fcb2de3cabd5fe455a2db534b371124f1f21de8731783dec828b"},
+ {file = "alabaster-1.0.0.tar.gz", hash = "sha256:c00dca57bca26fa62a6d7d0a9fcce65f3e026e9bfe33e9c538fd3fbb2144fd9e"},
+]
+
+[[package]]
+name = "annotated-types"
+version = "0.7.0"
+requires_python = ">=3.8"
+summary = "Reusable constraint types to use with typing.Annotated"
+groups = ["default"]
+dependencies = [
+ "typing-extensions>=4.0.0; python_version < \"3.9\"",
+]
+files = [
+ {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"},
+ {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"},
+]
+
+[[package]]
+name = "antlr4-python3-runtime"
+version = "4.9.3"
+summary = "ANTLR 4.9.3 runtime for Python 3.7"
+groups = ["default"]
+dependencies = [
+ "typing; python_version < \"3.5\"",
+]
+files = [
+ {file = "antlr4-python3-runtime-4.9.3.tar.gz", hash = "sha256:f224469b4168294902bb1efa80a8bf7855f24c99aef99cbefc1bcd3cce77881b"},
+]
+
+[[package]]
+name = "anyio"
+version = "4.4.0"
+requires_python = ">=3.8"
+summary = "High level compatibility layer for multiple asynchronous event loop implementations"
+groups = ["default", "docs"]
+dependencies = [
+ "exceptiongroup>=1.0.2; python_version < \"3.11\"",
+ "idna>=2.8",
+ "sniffio>=1.1",
+ "typing-extensions>=4.1; python_version < \"3.11\"",
+]
+files = [
+ {file = "anyio-4.4.0-py3-none-any.whl", hash = "sha256:c1b2d8f46a8a812513012e1107cb0e68c17159a7a594208005a57dc776e1bdc7"},
+ {file = "anyio-4.4.0.tar.gz", hash = "sha256:5aadc6a1bbb7cdb0bede386cac5e2940f5e2ff3aa20277e991cf028e0585ce94"},
+]
+
+[[package]]
+name = "argon2-cffi"
+version = "23.1.0"
+requires_python = ">=3.7"
+summary = "Argon2 for Python"
+groups = ["default"]
+dependencies = [
+ "argon2-cffi-bindings",
+ "typing-extensions; python_version < \"3.8\"",
+]
+files = [
+ {file = "argon2_cffi-23.1.0-py3-none-any.whl", hash = "sha256:c670642b78ba29641818ab2e68bd4e6a78ba53b7eff7b4c3815ae16abf91c7ea"},
+ {file = "argon2_cffi-23.1.0.tar.gz", hash = "sha256:879c3e79a2729ce768ebb7d36d4609e3a78a4ca2ec3a9f12286ca057e3d0db08"},
+]
+
+[[package]]
+name = "argon2-cffi-bindings"
+version = "21.2.0"
+requires_python = ">=3.6"
+summary = "Low-level CFFI bindings for Argon2"
+groups = ["default"]
+dependencies = [
+ "cffi>=1.0.1",
+]
+files = [
+ {file = "argon2-cffi-bindings-21.2.0.tar.gz", hash = "sha256:bb89ceffa6c791807d1305ceb77dbfacc5aa499891d2c55661c6459651fc39e3"},
+ {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ccb949252cb2ab3a08c02024acb77cfb179492d5701c7cbdbfd776124d4d2367"},
+ {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9524464572e12979364b7d600abf96181d3541da11e23ddf565a32e70bd4dc0d"},
+ {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b746dba803a79238e925d9046a63aa26bf86ab2a2fe74ce6b009a1c3f5c8f2ae"},
+ {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58ed19212051f49a523abb1dbe954337dc82d947fb6e5a0da60f7c8471a8476c"},
+ {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:bd46088725ef7f58b5a1ef7ca06647ebaf0eb4baff7d1d0d177c6cc8744abd86"},
+ {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_i686.whl", hash = "sha256:8cd69c07dd875537a824deec19f978e0f2078fdda07fd5c42ac29668dda5f40f"},
+ {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f1152ac548bd5b8bcecfb0b0371f082037e47128653df2e8ba6e914d384f3c3e"},
+ {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win32.whl", hash = "sha256:603ca0aba86b1349b147cab91ae970c63118a0f30444d4bc80355937c950c082"},
+ {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win_amd64.whl", hash = "sha256:b2ef1c30440dbbcba7a5dc3e319408b59676e2e039e2ae11a8775ecf482b192f"},
+ {file = "argon2_cffi_bindings-21.2.0-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e415e3f62c8d124ee16018e491a009937f8cf7ebf5eb430ffc5de21b900dad93"},
+]
+
+[[package]]
+name = "async-timeout"
+version = "4.0.3"
+requires_python = ">=3.7"
+summary = "Timeout context manager for asyncio programs"
+groups = ["default"]
+marker = "python_version < \"3.11\""
+dependencies = [
+ "typing-extensions>=3.6.5; python_version < \"3.8\"",
+]
+files = [
+ {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"},
+ {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"},
+]
+
+[[package]]
+name = "attrs"
+version = "24.2.0"
+requires_python = ">=3.7"
+summary = "Classes Without Boilerplate"
+groups = ["default"]
+dependencies = [
+ "importlib-metadata; python_version < \"3.8\"",
+]
+files = [
+ {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"},
+ {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"},
+]
+
+[[package]]
+name = "babel"
+version = "2.16.0"
+requires_python = ">=3.8"
+summary = "Internationalization utilities"
+groups = ["docs"]
+dependencies = [
+ "pytz>=2015.7; python_version < \"3.9\"",
+]
+files = [
+ {file = "babel-2.16.0-py3-none-any.whl", hash = "sha256:368b5b98b37c06b7daf6696391c3240c938b37767d4584413e8438c5c435fa8b"},
+ {file = "babel-2.16.0.tar.gz", hash = "sha256:d1f3554ca26605fe173f3de0c65f750f5a42f924499bf134de6423582298e316"},
+]
+
+[[package]]
+name = "beautifulsoup4"
+version = "4.12.3"
+requires_python = ">=3.6.0"
+summary = "Screen-scraping library"
+groups = ["docs"]
+dependencies = [
+ "soupsieve>1.2",
+]
+files = [
+ {file = "beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"},
+ {file = "beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"},
+]
+
+[[package]]
+name = "black"
+version = "24.8.0"
+requires_python = ">=3.8"
+summary = "The uncompromising code formatter."
+groups = ["dev"]
+dependencies = [
+ "click>=8.0.0",
+ "mypy-extensions>=0.4.3",
+ "packaging>=22.0",
+ "pathspec>=0.9.0",
+ "platformdirs>=2",
+ "tomli>=1.1.0; python_version < \"3.11\"",
+ "typing-extensions>=4.0.1; python_version < \"3.11\"",
+]
+files = [
+ {file = "black-24.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:09cdeb74d494ec023ded657f7092ba518e8cf78fa8386155e4a03fdcc44679e6"},
+ {file = "black-24.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:81c6742da39f33b08e791da38410f32e27d632260e599df7245cccee2064afeb"},
+ {file = "black-24.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:707a1ca89221bc8a1a64fb5e15ef39cd755633daa672a9db7498d1c19de66a42"},
+ {file = "black-24.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:d6417535d99c37cee4091a2f24eb2b6d5ec42b144d50f1f2e436d9fe1916fe1a"},
+ {file = "black-24.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fb6e2c0b86bbd43dee042e48059c9ad7830abd5c94b0bc518c0eeec57c3eddc1"},
+ {file = "black-24.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:837fd281f1908d0076844bc2b801ad2d369c78c45cf800cad7b61686051041af"},
+ {file = "black-24.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:62e8730977f0b77998029da7971fa896ceefa2c4c4933fcd593fa599ecbf97a4"},
+ {file = "black-24.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:72901b4913cbac8972ad911dc4098d5753704d1f3c56e44ae8dce99eecb0e3af"},
+ {file = "black-24.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7c046c1d1eeb7aea9335da62472481d3bbf3fd986e093cffd35f4385c94ae368"},
+ {file = "black-24.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:649f6d84ccbae73ab767e206772cc2d7a393a001070a4c814a546afd0d423aed"},
+ {file = "black-24.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2b59b250fdba5f9a9cd9d0ece6e6d993d91ce877d121d161e4698af3eb9c1018"},
+ {file = "black-24.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:6e55d30d44bed36593c3163b9bc63bf58b3b30e4611e4d88a0c3c239930ed5b2"},
+ {file = "black-24.8.0-py3-none-any.whl", hash = "sha256:972085c618ee94f402da1af548a4f218c754ea7e5dc70acb168bfaca4c2542ed"},
+ {file = "black-24.8.0.tar.gz", hash = "sha256:2500945420b6784c38b9ee885af039f5e7471ef284ab03fa35ecdde4688cd83f"},
+]
+
+[[package]]
+name = "bofire"
+version = "0.0.13"
+requires_python = ">=3.9.0"
+summary = ""
+groups = ["default"]
+dependencies = [
+ "numpy",
+ "pandas",
+ "pydantic>=2.5",
+ "scipy>=1.7",
+ "typing-extensions",
+]
+files = [
+ {file = "bofire-0.0.13-py3-none-any.whl", hash = "sha256:a1b47732e70770a591d74bb24cdcd1ad1048b60e624df52a1529c6c54f8822c8"},
+ {file = "bofire-0.0.13.tar.gz", hash = "sha256:d1d83e781c63992c1fc9157587189251b4c06f6c9196f0e000e1cd329ca0fe6a"},
+]
+
+[[package]]
+name = "bofire"
+version = "0.0.13"
+extras = ["optimization"]
+requires_python = ">=3.9.0"
+summary = ""
+groups = ["default"]
+dependencies = [
+ "bofire==0.0.13",
+ "botorch>=0.10.0",
+ "cloudpickle>=2.0.0",
+ "cvxpy[clarabel]",
+ "formulaic>=1.0.1",
+ "multiprocess",
+ "plotly",
+ "scikit-learn>=1.0.0",
+ "sympy>=1.12",
+]
+files = [
+ {file = "bofire-0.0.13-py3-none-any.whl", hash = "sha256:a1b47732e70770a591d74bb24cdcd1ad1048b60e624df52a1529c6c54f8822c8"},
+ {file = "bofire-0.0.13.tar.gz", hash = "sha256:d1d83e781c63992c1fc9157587189251b4c06f6c9196f0e000e1cd329ca0fe6a"},
+]
+
+[[package]]
+name = "botorch"
+version = "0.11.3"
+requires_python = ">=3.10"
+summary = "Bayesian Optimization in PyTorch"
+groups = ["default"]
+dependencies = [
+ "gpytorch==1.12",
+ "linear-operator==0.5.2",
+ "mpmath<=1.3,>=0.19",
+ "multipledispatch",
+ "numpy<2.0",
+ "pyro-ppl>=1.8.4",
+ "scipy",
+ "torch>=1.13.1",
+]
+files = [
+ {file = "botorch-0.11.3-py3-none-any.whl", hash = "sha256:5ea3e95b82b9e7b36e1b04ed40c5d928fb4fb60f3ff1ef7f2fdd410979101e4d"},
+ {file = "botorch-0.11.3.tar.gz", hash = "sha256:600ab08f8007a94adbc5acf35073e7a25f55b58e85b2d895c101dabef74121ef"},
+]
+
+[[package]]
+name = "cachetools"
+version = "5.4.0"
+requires_python = ">=3.7"
+summary = "Extensible memoizing collections and decorators"
+groups = ["default"]
+marker = "python_version >= \"3.6\""
+files = [
+ {file = "cachetools-5.4.0-py3-none-any.whl", hash = "sha256:3ae3b49a3d5e28a77a0be2b37dbcb89005058959cb2323858c2657c4a8cab474"},
+ {file = "cachetools-5.4.0.tar.gz", hash = "sha256:b8adc2e7c07f105ced7bc56dbb6dfbe7c4a00acce20e2227b3f355be89bc6827"},
+]
+
+[[package]]
+name = "certifi"
+version = "2024.7.4"
+requires_python = ">=3.6"
+summary = "Python package for providing Mozilla's CA Bundle."
+groups = ["default", "docs"]
+files = [
+ {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"},
+ {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"},
+]
+
+[[package]]
+name = "cffi"
+version = "1.17.0"
+requires_python = ">=3.8"
+summary = "Foreign Function Interface for Python calling C code."
+groups = ["default"]
+dependencies = [
+ "pycparser",
+]
+files = [
+ {file = "cffi-1.17.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f9338cc05451f1942d0d8203ec2c346c830f8e86469903d5126c1f0a13a2bcbb"},
+ {file = "cffi-1.17.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a0ce71725cacc9ebf839630772b07eeec220cbb5f03be1399e0457a1464f8e1a"},
+ {file = "cffi-1.17.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c815270206f983309915a6844fe994b2fa47e5d05c4c4cef267c3b30e34dbe42"},
+ {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6bdcd415ba87846fd317bee0774e412e8792832e7805938987e4ede1d13046d"},
+ {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a98748ed1a1df4ee1d6f927e151ed6c1a09d5ec21684de879c7ea6aa96f58f2"},
+ {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0a048d4f6630113e54bb4b77e315e1ba32a5a31512c31a273807d0027a7e69ab"},
+ {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24aa705a5f5bd3a8bcfa4d123f03413de5d86e497435693b638cbffb7d5d8a1b"},
+ {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:856bf0924d24e7f93b8aee12a3a1095c34085600aa805693fb7f5d1962393206"},
+ {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:4304d4416ff032ed50ad6bb87416d802e67139e31c0bde4628f36a47a3164bfa"},
+ {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:331ad15c39c9fe9186ceaf87203a9ecf5ae0ba2538c9e898e3a6967e8ad3db6f"},
+ {file = "cffi-1.17.0-cp310-cp310-win32.whl", hash = "sha256:669b29a9eca6146465cc574659058ed949748f0809a2582d1f1a324eb91054dc"},
+ {file = "cffi-1.17.0-cp310-cp310-win_amd64.whl", hash = "sha256:48b389b1fd5144603d61d752afd7167dfd205973a43151ae5045b35793232aa2"},
+ {file = "cffi-1.17.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c5d97162c196ce54af6700949ddf9409e9833ef1003b4741c2b39ef46f1d9720"},
+ {file = "cffi-1.17.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5ba5c243f4004c750836f81606a9fcb7841f8874ad8f3bf204ff5e56332b72b9"},
+ {file = "cffi-1.17.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bb9333f58fc3a2296fb1d54576138d4cf5d496a2cc118422bd77835e6ae0b9cb"},
+ {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:435a22d00ec7d7ea533db494da8581b05977f9c37338c80bc86314bec2619424"},
+ {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1df34588123fcc88c872f5acb6f74ae59e9d182a2707097f9e28275ec26a12d"},
+ {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df8bb0010fdd0a743b7542589223a2816bdde4d94bb5ad67884348fa2c1c67e8"},
+ {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8b5b9712783415695663bd463990e2f00c6750562e6ad1d28e072a611c5f2a6"},
+ {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ffef8fd58a36fb5f1196919638f73dd3ae0db1a878982b27a9a5a176ede4ba91"},
+ {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e67d26532bfd8b7f7c05d5a766d6f437b362c1bf203a3a5ce3593a645e870b8"},
+ {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:45f7cd36186db767d803b1473b3c659d57a23b5fa491ad83c6d40f2af58e4dbb"},
+ {file = "cffi-1.17.0-cp311-cp311-win32.whl", hash = "sha256:a9015f5b8af1bb6837a3fcb0cdf3b874fe3385ff6274e8b7925d81ccaec3c5c9"},
+ {file = "cffi-1.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:b50aaac7d05c2c26dfd50c3321199f019ba76bb650e346a6ef3616306eed67b0"},
+ {file = "cffi-1.17.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aec510255ce690d240f7cb23d7114f6b351c733a74c279a84def763660a2c3bc"},
+ {file = "cffi-1.17.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2770bb0d5e3cc0e31e7318db06efcbcdb7b31bcb1a70086d3177692a02256f59"},
+ {file = "cffi-1.17.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db9a30ec064129d605d0f1aedc93e00894b9334ec74ba9c6bdd08147434b33eb"},
+ {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a47eef975d2b8b721775a0fa286f50eab535b9d56c70a6e62842134cf7841195"},
+ {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f3e0992f23bbb0be00a921eae5363329253c3b86287db27092461c887b791e5e"},
+ {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6107e445faf057c118d5050560695e46d272e5301feffda3c41849641222a828"},
+ {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb862356ee9391dc5a0b3cbc00f416b48c1b9a52d252d898e5b7696a5f9fe150"},
+ {file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c1c13185b90bbd3f8b5963cd8ce7ad4ff441924c31e23c975cb150e27c2bf67a"},
+ {file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:17c6d6d3260c7f2d94f657e6872591fe8733872a86ed1345bda872cfc8c74885"},
+ {file = "cffi-1.17.0-cp312-cp312-win32.whl", hash = "sha256:c3b8bd3133cd50f6b637bb4322822c94c5ce4bf0d724ed5ae70afce62187c492"},
+ {file = "cffi-1.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:dca802c8db0720ce1c49cce1149ff7b06e91ba15fa84b1d59144fef1a1bc7ac2"},
+ {file = "cffi-1.17.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6ce01337d23884b21c03869d2f68c5523d43174d4fc405490eb0091057943118"},
+ {file = "cffi-1.17.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cab2eba3830bf4f6d91e2d6718e0e1c14a2f5ad1af68a89d24ace0c6b17cced7"},
+ {file = "cffi-1.17.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:14b9cbc8f7ac98a739558eb86fabc283d4d564dafed50216e7f7ee62d0d25377"},
+ {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b00e7bcd71caa0282cbe3c90966f738e2db91e64092a877c3ff7f19a1628fdcb"},
+ {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:41f4915e09218744d8bae14759f983e466ab69b178de38066f7579892ff2a555"},
+ {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4760a68cab57bfaa628938e9c2971137e05ce48e762a9cb53b76c9b569f1204"},
+ {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:011aff3524d578a9412c8b3cfaa50f2c0bd78e03eb7af7aa5e0df59b158efb2f"},
+ {file = "cffi-1.17.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:a003ac9edc22d99ae1286b0875c460351f4e101f8c9d9d2576e78d7e048f64e0"},
+ {file = "cffi-1.17.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ef9528915df81b8f4c7612b19b8628214c65c9b7f74db2e34a646a0a2a0da2d4"},
+ {file = "cffi-1.17.0-cp313-cp313-win32.whl", hash = "sha256:70d2aa9fb00cf52034feac4b913181a6e10356019b18ef89bc7c12a283bf5f5a"},
+ {file = "cffi-1.17.0-cp313-cp313-win_amd64.whl", hash = "sha256:b7b6ea9e36d32582cda3465f54c4b454f62f23cb083ebc7a94e2ca6ef011c3a7"},
+ {file = "cffi-1.17.0.tar.gz", hash = "sha256:f3157624b7558b914cb039fd1af735e5e8049a87c817cc215109ad1c8779df76"},
+]
+
+[[package]]
+name = "charset-normalizer"
+version = "3.3.2"
+requires_python = ">=3.7.0"
+summary = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
+groups = ["default", "docs"]
+files = [
+ {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"},
+ {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"},
+]
+
+[[package]]
+name = "clarabel"
+version = "0.9.0"
+requires_python = ">=3.7"
+summary = "Clarabel Conic Interior Point Solver for Rust / Python"
+groups = ["default"]
+dependencies = [
+ "numpy",
+ "scipy",
+]
+files = [
+ {file = "clarabel-0.9.0-cp37-abi3-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:702cc4666c0ccf893c936f9f1f55cbb3233ae2d5fa05f67b370ac3e7ec50f222"},
+ {file = "clarabel-0.9.0-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:8ea616757b460153ead375b3dd3ce763d46fc3717248077bbfa7b2c844b1775f"},
+ {file = "clarabel-0.9.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b5ae16d7dd87aabf72260cf9590ba0d037c52d48555bcf3a86b1f0d9cf88dd4"},
+ {file = "clarabel-0.9.0-cp37-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:85cb560a5c4cdfb079e3437e21f0b62b69ba766ae082aeb96ced0b5763214077"},
+ {file = "clarabel-0.9.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0eaeb3fbb5a90b598700d5435c7f102592a1a79ee25df5a097e0af575838786b"},
+ {file = "clarabel-0.9.0-cp37-abi3-win32.whl", hash = "sha256:759c2fa0ccc61ae1a02691c43753638a0ae793bf1de81c6f6763c346789a7e25"},
+ {file = "clarabel-0.9.0-cp37-abi3-win_amd64.whl", hash = "sha256:d24e4ed1b686eb2fe2a1b6e77935af6ad62a2c044131e70801ec1d3ef3d33280"},
+ {file = "clarabel-0.9.0.tar.gz", hash = "sha256:0d6d3fe8800be5b4b5d40a8e14bd492667b3e46cc5dbe37677ce5ed25f0719d4"},
+]
+
+[[package]]
+name = "click"
+version = "8.1.7"
+requires_python = ">=3.7"
+summary = "Composable command line interface toolkit"
+groups = ["default", "dev", "docs"]
+dependencies = [
+ "colorama; platform_system == \"Windows\"",
+ "importlib-metadata; python_version < \"3.8\"",
+]
+files = [
+ {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"},
+ {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"},
+]
+
+[[package]]
+name = "cloudpickle"
+version = "3.0.0"
+requires_python = ">=3.8"
+summary = "Pickler class to extend the standard pickle.Pickler functionality"
+groups = ["default"]
+files = [
+ {file = "cloudpickle-3.0.0-py3-none-any.whl", hash = "sha256:246ee7d0c295602a036e86369c77fecda4ab17b506496730f2f576d9016fd9c7"},
+ {file = "cloudpickle-3.0.0.tar.gz", hash = "sha256:996d9a482c6fb4f33c1a35335cf8afd065d2a56e973270364840712d9131a882"},
+]
+
+[[package]]
+name = "colorama"
+version = "0.4.6"
+requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
+summary = "Cross-platform colored terminal text."
+groups = ["default", "dev", "docs"]
+files = [
+ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
+ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
+]
+
+[[package]]
+name = "colorful"
+version = "0.5.6"
+summary = "Terminal string styling done right, in Python."
+groups = ["default"]
+dependencies = [
+ "colorama; platform_system == \"Windows\"",
+]
+files = [
+ {file = "colorful-0.5.6-py2.py3-none-any.whl", hash = "sha256:eab8c1c809f5025ad2b5238a50bd691e26850da8cac8f90d660ede6ea1af9f1e"},
+ {file = "colorful-0.5.6.tar.gz", hash = "sha256:b56d5c01db1dac4898308ea889edcb113fbee3e6ec5df4bacffd61d5241b5b8d"},
+]
+
+[[package]]
+name = "coverage"
+version = "7.6.1"
+requires_python = ">=3.8"
+summary = "Code coverage measurement for Python"
+groups = ["dev"]
+files = [
+ {file = "coverage-7.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16"},
+ {file = "coverage-7.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36"},
+ {file = "coverage-7.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02"},
+ {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc"},
+ {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23"},
+ {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34"},
+ {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c"},
+ {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959"},
+ {file = "coverage-7.6.1-cp310-cp310-win32.whl", hash = "sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232"},
+ {file = "coverage-7.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0"},
+ {file = "coverage-7.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93"},
+ {file = "coverage-7.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3"},
+ {file = "coverage-7.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff"},
+ {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d"},
+ {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6"},
+ {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56"},
+ {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234"},
+ {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133"},
+ {file = "coverage-7.6.1-cp311-cp311-win32.whl", hash = "sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c"},
+ {file = "coverage-7.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6"},
+ {file = "coverage-7.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778"},
+ {file = "coverage-7.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391"},
+ {file = "coverage-7.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8"},
+ {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d"},
+ {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca"},
+ {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163"},
+ {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a"},
+ {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d"},
+ {file = "coverage-7.6.1-cp312-cp312-win32.whl", hash = "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5"},
+ {file = "coverage-7.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb"},
+ {file = "coverage-7.6.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a4acd025ecc06185ba2b801f2de85546e0b8ac787cf9d3b06e7e2a69f925b106"},
+ {file = "coverage-7.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a6d3adcf24b624a7b778533480e32434a39ad8fa30c315208f6d3e5542aeb6e9"},
+ {file = "coverage-7.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0c212c49b6c10e6951362f7c6df3329f04c2b1c28499563d4035d964ab8e08c"},
+ {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e81d7a3e58882450ec4186ca59a3f20a5d4440f25b1cff6f0902ad890e6748a"},
+ {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78b260de9790fd81e69401c2dc8b17da47c8038176a79092a89cb2b7d945d060"},
+ {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a78d169acd38300060b28d600344a803628c3fd585c912cacc9ea8790fe96862"},
+ {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2c09f4ce52cb99dd7505cd0fc8e0e37c77b87f46bc9c1eb03fe3bc9991085388"},
+ {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6878ef48d4227aace338d88c48738a4258213cd7b74fd9a3d4d7582bb1d8a155"},
+ {file = "coverage-7.6.1-cp313-cp313-win32.whl", hash = "sha256:44df346d5215a8c0e360307d46ffaabe0f5d3502c8a1cefd700b34baf31d411a"},
+ {file = "coverage-7.6.1-cp313-cp313-win_amd64.whl", hash = "sha256:8284cf8c0dd272a247bc154eb6c95548722dce90d098c17a883ed36e67cdb129"},
+ {file = "coverage-7.6.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d3296782ca4eab572a1a4eca686d8bfb00226300dcefdf43faa25b5242ab8a3e"},
+ {file = "coverage-7.6.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:502753043567491d3ff6d08629270127e0c31d4184c4c8d98f92c26f65019962"},
+ {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a89ecca80709d4076b95f89f308544ec8f7b4727e8a547913a35f16717856cb"},
+ {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a318d68e92e80af8b00fa99609796fdbcdfef3629c77c6283566c6f02c6d6704"},
+ {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13b0a73a0896988f053e4fbb7de6d93388e6dd292b0d87ee51d106f2c11b465b"},
+ {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4421712dbfc5562150f7554f13dde997a2e932a6b5f352edcce948a815efee6f"},
+ {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:166811d20dfea725e2e4baa71fffd6c968a958577848d2131f39b60043400223"},
+ {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3"},
+ {file = "coverage-7.6.1-cp313-cp313t-win32.whl", hash = "sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f"},
+ {file = "coverage-7.6.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657"},
+ {file = "coverage-7.6.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df"},
+ {file = "coverage-7.6.1.tar.gz", hash = "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d"},
+]
+
+[[package]]
+name = "coverage"
+version = "7.6.1"
+extras = ["toml"]
+requires_python = ">=3.8"
+summary = "Code coverage measurement for Python"
+groups = ["dev"]
+dependencies = [
+ "coverage==7.6.1",
+ "tomli; python_full_version <= \"3.11.0a6\"",
+]
+files = [
+ {file = "coverage-7.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16"},
+ {file = "coverage-7.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36"},
+ {file = "coverage-7.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02"},
+ {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc"},
+ {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23"},
+ {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34"},
+ {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c"},
+ {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959"},
+ {file = "coverage-7.6.1-cp310-cp310-win32.whl", hash = "sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232"},
+ {file = "coverage-7.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0"},
+ {file = "coverage-7.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93"},
+ {file = "coverage-7.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3"},
+ {file = "coverage-7.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff"},
+ {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d"},
+ {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6"},
+ {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56"},
+ {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234"},
+ {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133"},
+ {file = "coverage-7.6.1-cp311-cp311-win32.whl", hash = "sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c"},
+ {file = "coverage-7.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6"},
+ {file = "coverage-7.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778"},
+ {file = "coverage-7.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391"},
+ {file = "coverage-7.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8"},
+ {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d"},
+ {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca"},
+ {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163"},
+ {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a"},
+ {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d"},
+ {file = "coverage-7.6.1-cp312-cp312-win32.whl", hash = "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5"},
+ {file = "coverage-7.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb"},
+ {file = "coverage-7.6.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a4acd025ecc06185ba2b801f2de85546e0b8ac787cf9d3b06e7e2a69f925b106"},
+ {file = "coverage-7.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a6d3adcf24b624a7b778533480e32434a39ad8fa30c315208f6d3e5542aeb6e9"},
+ {file = "coverage-7.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0c212c49b6c10e6951362f7c6df3329f04c2b1c28499563d4035d964ab8e08c"},
+ {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e81d7a3e58882450ec4186ca59a3f20a5d4440f25b1cff6f0902ad890e6748a"},
+ {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78b260de9790fd81e69401c2dc8b17da47c8038176a79092a89cb2b7d945d060"},
+ {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a78d169acd38300060b28d600344a803628c3fd585c912cacc9ea8790fe96862"},
+ {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2c09f4ce52cb99dd7505cd0fc8e0e37c77b87f46bc9c1eb03fe3bc9991085388"},
+ {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6878ef48d4227aace338d88c48738a4258213cd7b74fd9a3d4d7582bb1d8a155"},
+ {file = "coverage-7.6.1-cp313-cp313-win32.whl", hash = "sha256:44df346d5215a8c0e360307d46ffaabe0f5d3502c8a1cefd700b34baf31d411a"},
+ {file = "coverage-7.6.1-cp313-cp313-win_amd64.whl", hash = "sha256:8284cf8c0dd272a247bc154eb6c95548722dce90d098c17a883ed36e67cdb129"},
+ {file = "coverage-7.6.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d3296782ca4eab572a1a4eca686d8bfb00226300dcefdf43faa25b5242ab8a3e"},
+ {file = "coverage-7.6.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:502753043567491d3ff6d08629270127e0c31d4184c4c8d98f92c26f65019962"},
+ {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a89ecca80709d4076b95f89f308544ec8f7b4727e8a547913a35f16717856cb"},
+ {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a318d68e92e80af8b00fa99609796fdbcdfef3629c77c6283566c6f02c6d6704"},
+ {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13b0a73a0896988f053e4fbb7de6d93388e6dd292b0d87ee51d106f2c11b465b"},
+ {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4421712dbfc5562150f7554f13dde997a2e932a6b5f352edcce948a815efee6f"},
+ {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:166811d20dfea725e2e4baa71fffd6c968a958577848d2131f39b60043400223"},
+ {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3"},
+ {file = "coverage-7.6.1-cp313-cp313t-win32.whl", hash = "sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f"},
+ {file = "coverage-7.6.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657"},
+ {file = "coverage-7.6.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df"},
+ {file = "coverage-7.6.1.tar.gz", hash = "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d"},
+]
+
+[[package]]
+name = "cvxpy"
+version = "1.5.2"
+requires_python = ">=3.8"
+summary = "A domain-specific language for modeling convex optimization problems in Python."
+groups = ["default"]
+dependencies = [
+ "clarabel>=0.5.0",
+ "ecos>=2",
+ "numpy>=1.15",
+ "osqp>=0.6.2",
+ "scipy>=1.1.0",
+ "scs>=3.2.4.post1",
+]
+files = [
+ {file = "cvxpy-1.5.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:60d24c3d656fa71b49790cc389313c8fdc9a2d17a97f530168eb93c46333c958"},
+ {file = "cvxpy-1.5.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9cbec71b6abca6c6d98f13e36a0daa95fecfbf54ef83b3f51c555ec0700b9b2f"},
+ {file = "cvxpy-1.5.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78e1fef846dec415f74783439bd66e9f232c885dee3ad5f9a050f83e3a433ac5"},
+ {file = "cvxpy-1.5.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d42bca823c3f7f63f82caff7163833dba1c4c8c5368b0435fa3417f70b7f0841"},
+ {file = "cvxpy-1.5.2-cp310-cp310-win_amd64.whl", hash = "sha256:acbdd6c6f2e5e7a506429878a4d835eaf7efd45fdb4409c59fdfb8a157ef76c4"},
+ {file = "cvxpy-1.5.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b9cf1437327f84f78d4efdd1baada049de3a749a3548e24ec3502ef35e663c0b"},
+ {file = "cvxpy-1.5.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:24c3156fb49252ea994d4629cbcecff1d1f1951ae76f6c225451d25d79dee923"},
+ {file = "cvxpy-1.5.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dbb74e2eecd1b7ee5dfdcbf61a4916d12f90444df55c2377aa02935932d13421"},
+ {file = "cvxpy-1.5.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:515886f6760a017354674b8f045e096ba20e7641241bd6557d04d0a01bfefbaa"},
+ {file = "cvxpy-1.5.2-cp311-cp311-win_amd64.whl", hash = "sha256:73e6917cd6754bef63a70fc93a83d80e2713a67c9b26157aa049d0e4588ee3c7"},
+ {file = "cvxpy-1.5.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:484f1a1f687c18cda6c382918a7c44f891d4901b4456d927da3c8ce9208c3e97"},
+ {file = "cvxpy-1.5.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5d51b5e56dcd93a6efdd83ea0b39df83808691706db21c11496d59dc66dca108"},
+ {file = "cvxpy-1.5.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae4820f285a8547c5fac197073572ed9c750978651c6499e3ec30a92b6be26a8"},
+ {file = "cvxpy-1.5.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b2189a959eb4bc1e81d8993e7fe780791d14fa486d558bd49adb8561b1df510"},
+ {file = "cvxpy-1.5.2-cp312-cp312-win_amd64.whl", hash = "sha256:b8c1c9a302229ded2bc9bc5c7263e4a24bcb645f3cfedb29072b0b49d77af7fb"},
+ {file = "cvxpy-1.5.2.tar.gz", hash = "sha256:8231f006f6b55da141758282aecb788b3b5742448765dba6a9440b6336080ce3"},
+]
+
+[[package]]
+name = "cvxpy"
+version = "1.5.2"
+extras = ["clarabel"]
+requires_python = ">=3.8"
+summary = "A domain-specific language for modeling convex optimization problems in Python."
+groups = ["default"]
+dependencies = [
+ "cvxpy==1.5.2",
+]
+files = [
+ {file = "cvxpy-1.5.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:60d24c3d656fa71b49790cc389313c8fdc9a2d17a97f530168eb93c46333c958"},
+ {file = "cvxpy-1.5.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9cbec71b6abca6c6d98f13e36a0daa95fecfbf54ef83b3f51c555ec0700b9b2f"},
+ {file = "cvxpy-1.5.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78e1fef846dec415f74783439bd66e9f232c885dee3ad5f9a050f83e3a433ac5"},
+ {file = "cvxpy-1.5.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d42bca823c3f7f63f82caff7163833dba1c4c8c5368b0435fa3417f70b7f0841"},
+ {file = "cvxpy-1.5.2-cp310-cp310-win_amd64.whl", hash = "sha256:acbdd6c6f2e5e7a506429878a4d835eaf7efd45fdb4409c59fdfb8a157ef76c4"},
+ {file = "cvxpy-1.5.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b9cf1437327f84f78d4efdd1baada049de3a749a3548e24ec3502ef35e663c0b"},
+ {file = "cvxpy-1.5.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:24c3156fb49252ea994d4629cbcecff1d1f1951ae76f6c225451d25d79dee923"},
+ {file = "cvxpy-1.5.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dbb74e2eecd1b7ee5dfdcbf61a4916d12f90444df55c2377aa02935932d13421"},
+ {file = "cvxpy-1.5.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:515886f6760a017354674b8f045e096ba20e7641241bd6557d04d0a01bfefbaa"},
+ {file = "cvxpy-1.5.2-cp311-cp311-win_amd64.whl", hash = "sha256:73e6917cd6754bef63a70fc93a83d80e2713a67c9b26157aa049d0e4588ee3c7"},
+ {file = "cvxpy-1.5.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:484f1a1f687c18cda6c382918a7c44f891d4901b4456d927da3c8ce9208c3e97"},
+ {file = "cvxpy-1.5.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5d51b5e56dcd93a6efdd83ea0b39df83808691706db21c11496d59dc66dca108"},
+ {file = "cvxpy-1.5.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae4820f285a8547c5fac197073572ed9c750978651c6499e3ec30a92b6be26a8"},
+ {file = "cvxpy-1.5.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b2189a959eb4bc1e81d8993e7fe780791d14fa486d558bd49adb8561b1df510"},
+ {file = "cvxpy-1.5.2-cp312-cp312-win_amd64.whl", hash = "sha256:b8c1c9a302229ded2bc9bc5c7263e4a24bcb645f3cfedb29072b0b49d77af7fb"},
+ {file = "cvxpy-1.5.2.tar.gz", hash = "sha256:8231f006f6b55da141758282aecb788b3b5742448765dba6a9440b6336080ce3"},
+]
+
+[[package]]
+name = "dill"
+version = "0.3.8"
+requires_python = ">=3.8"
+summary = "serialize all of Python"
+groups = ["default"]
+files = [
+ {file = "dill-0.3.8-py3-none-any.whl", hash = "sha256:c36ca9ffb54365bdd2f8eb3eff7d2a21237f8452b57ace88b1ac615b7e815bd7"},
+ {file = "dill-0.3.8.tar.gz", hash = "sha256:3ebe3c479ad625c4553aca177444d89b486b1d84982eeacded644afc0cf797ca"},
+]
+
+[[package]]
+name = "distlib"
+version = "0.3.8"
+summary = "Distribution utilities"
+groups = ["default"]
+files = [
+ {file = "distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"},
+ {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"},
+]
+
+[[package]]
+name = "dnspython"
+version = "2.6.1"
+requires_python = ">=3.8"
+summary = "DNS toolkit"
+groups = ["default"]
+files = [
+ {file = "dnspython-2.6.1-py3-none-any.whl", hash = "sha256:5ef3b9680161f6fa89daf8ad451b5f1a33b18ae8a1c6778cdf4b43f08c0a6e50"},
+ {file = "dnspython-2.6.1.tar.gz", hash = "sha256:e8f0f9c23a7b7cb99ded64e6c3a6f3e701d78f50c55e002b839dea7225cff7cc"},
+]
+
+[[package]]
+name = "docutils"
+version = "0.21.2"
+requires_python = ">=3.9"
+summary = "Docutils -- Python Documentation Utilities"
+groups = ["docs"]
+files = [
+ {file = "docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2"},
+ {file = "docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f"},
+]
+
+[[package]]
+name = "ecos"
+version = "2.0.14"
+summary = "This is the Python package for ECOS: Embedded Cone Solver. See Github page for more information."
+groups = ["default"]
+dependencies = [
+ "numpy>=1.6",
+ "scipy>=0.9",
+]
+files = [
+ {file = "ecos-2.0.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d16f8c97c42a18be77530b4d0090d8dd38105ae311518fc58a66c5c403d79672"},
+ {file = "ecos-2.0.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9a977976ec618261456d6c9cd4ec7b7745607e448e78cd0c851190c6cc515ef"},
+ {file = "ecos-2.0.14-cp310-cp310-win_amd64.whl", hash = "sha256:f2e8ab314609117f7e96bb83db7458f011ab0496c61078e146a8f5c8244e70b2"},
+ {file = "ecos-2.0.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:dc90b54eaae16ead128bfdd95e04bf808b73578bdf40ed652c55aa36a6d02e42"},
+ {file = "ecos-2.0.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a8be3b4856838ae351fec40fb3589181d52b41cf75bf4d35342686a508c37a6"},
+ {file = "ecos-2.0.14-cp311-cp311-win_amd64.whl", hash = "sha256:7495b3031ccc2d4cec72cdb40aed8a2d1fdd734fe40519b7e6047aead5e811cf"},
+ {file = "ecos-2.0.14-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4a7e2704a3ef9acfb8146d594deff9942d3a0f0d0399de8fe2e0bd95e8b0855c"},
+ {file = "ecos-2.0.14-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3cbb1a66ecf10955a1a4bcd6b99db55148000cb79fd176bfac26d98b21a4814"},
+ {file = "ecos-2.0.14-cp312-cp312-win_amd64.whl", hash = "sha256:718eb62afb8e45426bcc365ebaf3ca9f610afcbb754de6073ef5f104da8fca1f"},
+ {file = "ecos-2.0.14.tar.gz", hash = "sha256:64b3201c0e0a7f0129050557c4ac50b00031e80a10534506dba1200c8dc1efe4"},
+]
+
+[[package]]
+name = "editorconfig"
+version = "0.12.4"
+summary = "EditorConfig File Locator and Interpreter for Python"
+groups = ["default"]
+files = [
+ {file = "EditorConfig-0.12.4.tar.gz", hash = "sha256:24857fa1793917dd9ccf0c7810a07e05404ce9b823521c7dce22a4fb5d125f80"},
+]
+
+[[package]]
+name = "exceptiongroup"
+version = "1.2.2"
+requires_python = ">=3.7"
+summary = "Backport of PEP 654 (exception groups)"
+groups = ["default", "dev", "docs"]
+marker = "python_version < \"3.11\""
+files = [
+ {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"},
+ {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"},
+]
+
+[[package]]
+name = "faker"
+version = "27.0.0"
+requires_python = ">=3.8"
+summary = "Faker is a Python package that generates fake data for you."
+groups = ["default"]
+dependencies = [
+ "python-dateutil>=2.4",
+]
+files = [
+ {file = "Faker-27.0.0-py3-none-any.whl", hash = "sha256:55ed0c4ed7bf16800c64823805f6fbbe6d4823db4b7c0903f6f890b8e4d6c34b"},
+ {file = "faker-27.0.0.tar.gz", hash = "sha256:32c78b68d2ba97aaad78422e4035785de2b4bb46b81e428190fc11978da9036c"},
+]
+
+[[package]]
+name = "fast-query-parsers"
+version = "1.0.3"
+requires_python = ">=3.8"
+summary = "Ultra-fast query string and url-encoded form-data parsers"
+groups = ["default"]
+files = [
+ {file = "fast_query_parsers-1.0.3-cp38-abi3-macosx_10_7_x86_64.whl", hash = "sha256:afbf71c1b4398dacfb9d84755eb026f8e759f68a066f1f3cc19e471fc342e74f"},
+ {file = "fast_query_parsers-1.0.3-cp38-abi3-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:42f26875311d1b151c3406adfa39ec2db98df111a369d75f6fa243ec8462f147"},
+ {file = "fast_query_parsers-1.0.3-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:66630ad423b5b1f5709f82a4d8482cd6aa2f3fa73d2c779ff1877f25dee08d55"},
+ {file = "fast_query_parsers-1.0.3-cp38-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a6e3d816c572a6fad1ae9b93713b2db0d3db6e8f594e035ad52361d668dd94a8"},
+ {file = "fast_query_parsers-1.0.3-cp38-abi3-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:0bdcc0ddb4cc69d823c2c0dedd8f5affc71042db39908ad2ca06261bf388cac6"},
+ {file = "fast_query_parsers-1.0.3-cp38-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6720505f2d2a764c76bcc4f3730a9dff69d9871740e46264f6605d73f9ce3794"},
+ {file = "fast_query_parsers-1.0.3-cp38-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e947e7251769593da93832a10861f59565a46149fa117ebdf25377e7b2853936"},
+ {file = "fast_query_parsers-1.0.3-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:55a30b7cee0a53cddf9016b86fdad87221980d5a02a6126c491bd309755e6de9"},
+ {file = "fast_query_parsers-1.0.3-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9bc2b457caa38371df1a30cfdfc57bd9bfdf348367abdaf6f36533416a0b0e93"},
+ {file = "fast_query_parsers-1.0.3-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:5736d3c32d6ba23995fa569fe572feabcfcfc30ac9e4709e94cff6f2c456a3d1"},
+ {file = "fast_query_parsers-1.0.3-cp38-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:3a6377eb0c5b172fbc77c3f96deaf1e51708b4b96d27ce173658bf11c1c00b20"},
+ {file = "fast_query_parsers-1.0.3-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:7ca6be04f443a1b055e910ccad01b1d72212f269a530415df99a87c5f1e9c927"},
+ {file = "fast_query_parsers-1.0.3-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a70d4d8852606f2dd5b798ab628b9d8dc6970ddfdd9e96f4543eb0cc89a74fb5"},
+ {file = "fast_query_parsers-1.0.3-cp38-abi3-win32.whl", hash = "sha256:14b3fab7e9a6ac1c1efaf66c3fd2a3fd1e25ede03ed14118035e530433830a11"},
+ {file = "fast_query_parsers-1.0.3-cp38-abi3-win_amd64.whl", hash = "sha256:21ae5f3a209aee7d3b84bdcdb33dd79f39fc8cb608b3ae8cfcb78123758c1a16"},
+ {file = "fast_query_parsers-1.0.3.tar.gz", hash = "sha256:5200a9e02997ad51d4d76a60ea1b256a68a184b04359540eb6310a15013df68f"},
+]
+
+[[package]]
+name = "filelock"
+version = "3.15.4"
+requires_python = ">=3.8"
+summary = "A platform independent file lock."
+groups = ["default"]
+files = [
+ {file = "filelock-3.15.4-py3-none-any.whl", hash = "sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7"},
+ {file = "filelock-3.15.4.tar.gz", hash = "sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb"},
+]
+
+[[package]]
+name = "formulaic"
+version = "1.0.2"
+requires_python = ">=3.7.2"
+summary = "An implementation of Wilkinson formulas."
+groups = ["default"]
+dependencies = [
+ "astor>=0.8; python_version < \"3.9\"",
+ "cached-property>=1.3.0; python_version < \"3.8\"",
+ "graphlib-backport>=1.0.0; python_version < \"3.9\"",
+ "interface-meta>=1.2.0",
+ "numpy>=1.16.5",
+ "pandas>=1.0",
+ "scipy>=1.6",
+ "typing-extensions>=4.2.0",
+ "wrapt>=1.0",
+]
+files = [
+ {file = "formulaic-1.0.2-py3-none-any.whl", hash = "sha256:663328b038a0eb7644f59400615da7abf2672b0e11124b3bef3307afc441d97c"},
+ {file = "formulaic-1.0.2.tar.gz", hash = "sha256:6eb65bedd1903c5381d8f2ae7a55b6ba13cb77d57bbaf6e4278f3b2c38e3660e"},
+]
+
+[[package]]
+name = "frozenlist"
+version = "1.4.1"
+requires_python = ">=3.8"
+summary = "A list-like structure which implements collections.abc.MutableSequence"
+groups = ["default"]
+files = [
+ {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"},
+ {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868"},
+ {file = "frozenlist-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74fb4bee6880b529a0c6560885fce4dc95936920f9f20f53d99a213f7bf66776"},
+ {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:590344787a90ae57d62511dd7c736ed56b428f04cd8c161fcc5e7232c130c69a"},
+ {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:068b63f23b17df8569b7fdca5517edef76171cf3897eb68beb01341131fbd2ad"},
+ {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c849d495bf5154cd8da18a9eb15db127d4dba2968d88831aff6f0331ea9bd4c"},
+ {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9750cc7fe1ae3b1611bb8cfc3f9ec11d532244235d75901fb6b8e42ce9229dfe"},
+ {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9b2de4cf0cdd5bd2dee4c4f63a653c61d2408055ab77b151c1957f221cabf2a"},
+ {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0633c8d5337cb5c77acbccc6357ac49a1770b8c487e5b3505c57b949b4b82e98"},
+ {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:27657df69e8801be6c3638054e202a135c7f299267f1a55ed3a598934f6c0d75"},
+ {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f9a3ea26252bd92f570600098783d1371354d89d5f6b7dfd87359d669f2109b5"},
+ {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4f57dab5fe3407b6c0c1cc907ac98e8a189f9e418f3b6e54d65a718aaafe3950"},
+ {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e02a0e11cf6597299b9f3bbd3f93d79217cb90cfd1411aec33848b13f5c656cc"},
+ {file = "frozenlist-1.4.1-cp310-cp310-win32.whl", hash = "sha256:a828c57f00f729620a442881cc60e57cfcec6842ba38e1b19fd3e47ac0ff8dc1"},
+ {file = "frozenlist-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:f56e2333dda1fe0f909e7cc59f021eba0d2307bc6f012a1ccf2beca6ba362439"},
+ {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a0cb6f11204443f27a1628b0e460f37fb30f624be6051d490fa7d7e26d4af3d0"},
+ {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b46c8ae3a8f1f41a0d2ef350c0b6e65822d80772fe46b653ab6b6274f61d4a49"},
+ {file = "frozenlist-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fde5bd59ab5357e3853313127f4d3565fc7dad314a74d7b5d43c22c6a5ed2ced"},
+ {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:722e1124aec435320ae01ee3ac7bec11a5d47f25d0ed6328f2273d287bc3abb0"},
+ {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2471c201b70d58a0f0c1f91261542a03d9a5e088ed3dc6c160d614c01649c106"},
+ {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c757a9dd70d72b076d6f68efdbb9bc943665ae954dad2801b874c8c69e185068"},
+ {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f146e0911cb2f1da549fc58fc7bcd2b836a44b79ef871980d605ec392ff6b0d2"},
+ {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9c515e7914626b2a2e1e311794b4c35720a0be87af52b79ff8e1429fc25f19"},
+ {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c302220494f5c1ebeb0912ea782bcd5e2f8308037b3c7553fad0e48ebad6ad82"},
+ {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:442acde1e068288a4ba7acfe05f5f343e19fac87bfc96d89eb886b0363e977ec"},
+ {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:1b280e6507ea8a4fa0c0a7150b4e526a8d113989e28eaaef946cc77ffd7efc0a"},
+ {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:fe1a06da377e3a1062ae5fe0926e12b84eceb8a50b350ddca72dc85015873f74"},
+ {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db9e724bebd621d9beca794f2a4ff1d26eed5965b004a97f1f1685a173b869c2"},
+ {file = "frozenlist-1.4.1-cp311-cp311-win32.whl", hash = "sha256:e774d53b1a477a67838a904131c4b0eef6b3d8a651f8b138b04f748fccfefe17"},
+ {file = "frozenlist-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb3c2db03683b5767dedb5769b8a40ebb47d6f7f45b1b3e3b4b51ec8ad9d9825"},
+ {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1979bc0aeb89b33b588c51c54ab0161791149f2461ea7c7c946d95d5f93b56ae"},
+ {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cc7b01b3754ea68a62bd77ce6020afaffb44a590c2289089289363472d13aedb"},
+ {file = "frozenlist-1.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9c92be9fd329ac801cc420e08452b70e7aeab94ea4233a4804f0915c14eba9b"},
+ {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3894db91f5a489fc8fa6a9991820f368f0b3cbdb9cd8849547ccfab3392d86"},
+ {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba60bb19387e13597fb059f32cd4d59445d7b18b69a745b8f8e5db0346f33480"},
+ {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aefbba5f69d42246543407ed2461db31006b0f76c4e32dfd6f42215a2c41d09"},
+ {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780d3a35680ced9ce682fbcf4cb9c2bad3136eeff760ab33707b71db84664e3a"},
+ {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9acbb16f06fe7f52f441bb6f413ebae6c37baa6ef9edd49cdd567216da8600cd"},
+ {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:23b701e65c7b36e4bf15546a89279bd4d8675faabc287d06bbcfac7d3c33e1e6"},
+ {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3e0153a805a98f5ada7e09826255ba99fb4f7524bb81bf6b47fb702666484ae1"},
+ {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:dd9b1baec094d91bf36ec729445f7769d0d0cf6b64d04d86e45baf89e2b9059b"},
+ {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:1a4471094e146b6790f61b98616ab8e44f72661879cc63fa1049d13ef711e71e"},
+ {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5667ed53d68d91920defdf4035d1cdaa3c3121dc0b113255124bcfada1cfa1b8"},
+ {file = "frozenlist-1.4.1-cp312-cp312-win32.whl", hash = "sha256:beee944ae828747fd7cb216a70f120767fc9f4f00bacae8543c14a6831673f89"},
+ {file = "frozenlist-1.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:64536573d0a2cb6e625cf309984e2d873979709f2cf22839bf2d61790b448ad5"},
+ {file = "frozenlist-1.4.1-py3-none-any.whl", hash = "sha256:04ced3e6a46b4cfffe20f9ae482818e34eba9b5fb0ce4056e4cc9b6e212d09b7"},
+ {file = "frozenlist-1.4.1.tar.gz", hash = "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b"},
+]
+
+[[package]]
+name = "fsspec"
+version = "2024.6.1"
+requires_python = ">=3.8"
+summary = "File-system specification"
+groups = ["default"]
+files = [
+ {file = "fsspec-2024.6.1-py3-none-any.whl", hash = "sha256:3cb443f8bcd2efb31295a5b9fdb02aee81d8452c80d28f97a6d0959e6cee101e"},
+ {file = "fsspec-2024.6.1.tar.gz", hash = "sha256:fad7d7e209dd4c1208e3bbfda706620e0da5142bebbd9c384afb95b07e798e49"},
+]
+
+[[package]]
+name = "google-api-core"
+version = "2.19.1"
+requires_python = ">=3.7"
+summary = "Google API client core library"
+groups = ["default"]
+marker = "python_version >= \"3.6\""
+dependencies = [
+ "google-auth<3.0.dev0,>=2.14.1",
+ "googleapis-common-protos<2.0.dev0,>=1.56.2",
+ "proto-plus<2.0.0dev,>=1.22.3",
+ "protobuf!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<6.0.0.dev0,>=3.19.5",
+ "requests<3.0.0.dev0,>=2.18.0",
+]
+files = [
+ {file = "google-api-core-2.19.1.tar.gz", hash = "sha256:f4695f1e3650b316a795108a76a1c416e6afb036199d1c1f1f110916df479ffd"},
+ {file = "google_api_core-2.19.1-py3-none-any.whl", hash = "sha256:f12a9b8309b5e21d92483bbd47ce2c445861ec7d269ef6784ecc0ea8c1fa6125"},
+]
+
+[[package]]
+name = "google-auth"
+version = "2.33.0"
+requires_python = ">=3.7"
+summary = "Google Authentication Library"
+groups = ["default"]
+marker = "python_version >= \"3.6\""
+dependencies = [
+ "cachetools<6.0,>=2.0.0",
+ "pyasn1-modules>=0.2.1",
+ "rsa<5,>=3.1.4",
+]
+files = [
+ {file = "google_auth-2.33.0-py2.py3-none-any.whl", hash = "sha256:8eff47d0d4a34ab6265c50a106a3362de6a9975bb08998700e389f857e4d39df"},
+ {file = "google_auth-2.33.0.tar.gz", hash = "sha256:d6a52342160d7290e334b4d47ba390767e4438ad0d45b7630774533e82655b95"},
+]
+
+[[package]]
+name = "googleapis-common-protos"
+version = "1.63.2"
+requires_python = ">=3.7"
+summary = "Common protobufs used in Google APIs"
+groups = ["default"]
+marker = "python_version >= \"3.6\""
+dependencies = [
+ "protobuf!=3.20.0,!=3.20.1,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<6.0.0.dev0,>=3.20.2",
+]
+files = [
+ {file = "googleapis-common-protos-1.63.2.tar.gz", hash = "sha256:27c5abdffc4911f28101e635de1533fb4cfd2c37fbaa9174587c799fac90aa87"},
+ {file = "googleapis_common_protos-1.63.2-py2.py3-none-any.whl", hash = "sha256:27a2499c7e8aff199665b22741997e485eccc8645aa9176c7c988e6fae507945"},
+]
+
+[[package]]
+name = "gpytorch"
+version = "1.12"
+requires_python = ">=3.8"
+summary = "An implementation of Gaussian Processes in Pytorch"
+groups = ["default"]
+dependencies = [
+ "linear-operator>=0.5.2",
+ "mpmath<=1.3,>=0.19",
+ "scikit-learn",
+ "scipy",
+]
+files = [
+ {file = "gpytorch-1.12-py3-none-any.whl", hash = "sha256:cee9da2dc53642a7aaba3da443b14f762dd07655a926920f5c8a4e5c54d39a8a"},
+ {file = "gpytorch-1.12.tar.gz", hash = "sha256:dc2c160af72364189f5b1fd7c804f69bdbcd8c65bfd3da5c9c2fc34029639adf"},
+]
+
+[[package]]
+name = "grpcio"
+version = "1.65.4"
+requires_python = ">=3.8"
+summary = "HTTP/2-based RPC framework"
+groups = ["default"]
+marker = "python_version >= \"3.10\""
+files = [
+ {file = "grpcio-1.65.4-cp310-cp310-linux_armv7l.whl", hash = "sha256:0e85c8766cf7f004ab01aff6a0393935a30d84388fa3c58d77849fcf27f3e98c"},
+ {file = "grpcio-1.65.4-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:e4a795c02405c7dfa8affd98c14d980f4acea16ea3b539e7404c645329460e5a"},
+ {file = "grpcio-1.65.4-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:d7b984a8dd975d949c2042b9b5ebcf297d6d5af57dcd47f946849ee15d3c2fb8"},
+ {file = "grpcio-1.65.4-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:644a783ce604a7d7c91412bd51cf9418b942cf71896344b6dc8d55713c71ce82"},
+ {file = "grpcio-1.65.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5764237d751d3031a36fafd57eb7d36fd2c10c658d2b4057c516ccf114849a3e"},
+ {file = "grpcio-1.65.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ee40d058cf20e1dd4cacec9c39e9bce13fedd38ce32f9ba00f639464fcb757de"},
+ {file = "grpcio-1.65.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4482a44ce7cf577a1f8082e807a5b909236bce35b3e3897f839f2fbd9ae6982d"},
+ {file = "grpcio-1.65.4-cp310-cp310-win32.whl", hash = "sha256:66bb051881c84aa82e4f22d8ebc9d1704b2e35d7867757f0740c6ef7b902f9b1"},
+ {file = "grpcio-1.65.4-cp310-cp310-win_amd64.whl", hash = "sha256:870370524eff3144304da4d1bbe901d39bdd24f858ce849b7197e530c8c8f2ec"},
+ {file = "grpcio-1.65.4-cp311-cp311-linux_armv7l.whl", hash = "sha256:85e9c69378af02e483bc626fc19a218451b24a402bdf44c7531e4c9253fb49ef"},
+ {file = "grpcio-1.65.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2bd672e005afab8bf0d6aad5ad659e72a06dd713020554182a66d7c0c8f47e18"},
+ {file = "grpcio-1.65.4-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:abccc5d73f5988e8f512eb29341ed9ced923b586bb72e785f265131c160231d8"},
+ {file = "grpcio-1.65.4-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:886b45b29f3793b0c2576201947258782d7e54a218fe15d4a0468d9a6e00ce17"},
+ {file = "grpcio-1.65.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be952436571dacc93ccc7796db06b7daf37b3b56bb97e3420e6503dccfe2f1b4"},
+ {file = "grpcio-1.65.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8dc9ddc4603ec43f6238a5c95400c9a901b6d079feb824e890623da7194ff11e"},
+ {file = "grpcio-1.65.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ade1256c98cba5a333ef54636095f2c09e6882c35f76acb04412f3b1aa3c29a5"},
+ {file = "grpcio-1.65.4-cp311-cp311-win32.whl", hash = "sha256:280e93356fba6058cbbfc6f91a18e958062ef1bdaf5b1caf46c615ba1ae71b5b"},
+ {file = "grpcio-1.65.4-cp311-cp311-win_amd64.whl", hash = "sha256:d2b819f9ee27ed4e3e737a4f3920e337e00bc53f9e254377dd26fc7027c4d558"},
+ {file = "grpcio-1.65.4-cp312-cp312-linux_armv7l.whl", hash = "sha256:926a0750a5e6fb002542e80f7fa6cab8b1a2ce5513a1c24641da33e088ca4c56"},
+ {file = "grpcio-1.65.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:2a1d4c84d9e657f72bfbab8bedf31bdfc6bfc4a1efb10b8f2d28241efabfaaf2"},
+ {file = "grpcio-1.65.4-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:17de4fda50967679677712eec0a5c13e8904b76ec90ac845d83386b65da0ae1e"},
+ {file = "grpcio-1.65.4-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3dee50c1b69754a4228e933696408ea87f7e896e8d9797a3ed2aeed8dbd04b74"},
+ {file = "grpcio-1.65.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:74c34fc7562bdd169b77966068434a93040bfca990e235f7a67cdf26e1bd5c63"},
+ {file = "grpcio-1.65.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:24a2246e80a059b9eb981e4c2a6d8111b1b5e03a44421adbf2736cc1d4988a8a"},
+ {file = "grpcio-1.65.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:18c10f0d054d2dce34dd15855fcca7cc44ec3b811139437543226776730c0f28"},
+ {file = "grpcio-1.65.4-cp312-cp312-win32.whl", hash = "sha256:d72962788b6c22ddbcdb70b10c11fbb37d60ae598c51eb47ec019db66ccfdff0"},
+ {file = "grpcio-1.65.4-cp312-cp312-win_amd64.whl", hash = "sha256:7656376821fed8c89e68206a522522317787a3d9ed66fb5110b1dff736a5e416"},
+ {file = "grpcio-1.65.4.tar.gz", hash = "sha256:2a4f476209acffec056360d3e647ae0e14ae13dcf3dfb130c227ae1c594cbe39"},
+]
+
+[[package]]
+name = "h11"
+version = "0.14.0"
+requires_python = ">=3.7"
+summary = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1"
+groups = ["default", "docs"]
+dependencies = [
+ "typing-extensions; python_version < \"3.8\"",
+]
+files = [
+ {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"},
+ {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"},
+]
+
+[[package]]
+name = "httpcore"
+version = "1.0.5"
+requires_python = ">=3.8"
+summary = "A minimal low-level HTTP client."
+groups = ["default"]
+dependencies = [
+ "certifi",
+ "h11<0.15,>=0.13",
+]
+files = [
+ {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"},
+ {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"},
+]
+
+[[package]]
+name = "httptools"
+version = "0.6.1"
+requires_python = ">=3.8.0"
+summary = "A collection of framework independent HTTP protocol utils."
+groups = ["default"]
+files = [
+ {file = "httptools-0.6.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d2f6c3c4cb1948d912538217838f6e9960bc4a521d7f9b323b3da579cd14532f"},
+ {file = "httptools-0.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:00d5d4b68a717765b1fabfd9ca755bd12bf44105eeb806c03d1962acd9b8e563"},
+ {file = "httptools-0.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:639dc4f381a870c9ec860ce5c45921db50205a37cc3334e756269736ff0aac58"},
+ {file = "httptools-0.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e57997ac7fb7ee43140cc03664de5f268813a481dff6245e0075925adc6aa185"},
+ {file = "httptools-0.6.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0ac5a0ae3d9f4fe004318d64b8a854edd85ab76cffbf7ef5e32920faef62f142"},
+ {file = "httptools-0.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3f30d3ce413088a98b9db71c60a6ada2001a08945cb42dd65a9a9fe228627658"},
+ {file = "httptools-0.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:1ed99a373e327f0107cb513b61820102ee4f3675656a37a50083eda05dc9541b"},
+ {file = "httptools-0.6.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7a7ea483c1a4485c71cb5f38be9db078f8b0e8b4c4dc0210f531cdd2ddac1ef1"},
+ {file = "httptools-0.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:85ed077c995e942b6f1b07583e4eb0a8d324d418954fc6af913d36db7c05a5a0"},
+ {file = "httptools-0.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b0bb634338334385351a1600a73e558ce619af390c2b38386206ac6a27fecfc"},
+ {file = "httptools-0.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d9ceb2c957320def533671fc9c715a80c47025139c8d1f3797477decbc6edd2"},
+ {file = "httptools-0.6.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4f0f8271c0a4db459f9dc807acd0eadd4839934a4b9b892f6f160e94da309837"},
+ {file = "httptools-0.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6a4f5ccead6d18ec072ac0b84420e95d27c1cdf5c9f1bc8fbd8daf86bd94f43d"},
+ {file = "httptools-0.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:5cceac09f164bcba55c0500a18fe3c47df29b62353198e4f37bbcc5d591172c3"},
+ {file = "httptools-0.6.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:75c8022dca7935cba14741a42744eee13ba05db00b27a4b940f0d646bd4d56d0"},
+ {file = "httptools-0.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:48ed8129cd9a0d62cf4d1575fcf90fb37e3ff7d5654d3a5814eb3d55f36478c2"},
+ {file = "httptools-0.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f58e335a1402fb5a650e271e8c2d03cfa7cea46ae124649346d17bd30d59c90"},
+ {file = "httptools-0.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93ad80d7176aa5788902f207a4e79885f0576134695dfb0fefc15b7a4648d503"},
+ {file = "httptools-0.6.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9bb68d3a085c2174c2477eb3ffe84ae9fb4fde8792edb7bcd09a1d8467e30a84"},
+ {file = "httptools-0.6.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b512aa728bc02354e5ac086ce76c3ce635b62f5fbc32ab7082b5e582d27867bb"},
+ {file = "httptools-0.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:97662ce7fb196c785344d00d638fc9ad69e18ee4bfb4000b35a52efe5adcc949"},
+ {file = "httptools-0.6.1.tar.gz", hash = "sha256:c6e26c30455600b95d94b1b836085138e82f177351454ee841c148f93a9bad5a"},
+]
+
+[[package]]
+name = "httpx"
+version = "0.27.0"
+requires_python = ">=3.8"
+summary = "The next generation HTTP client."
+groups = ["default"]
+dependencies = [
+ "anyio",
+ "certifi",
+ "httpcore==1.*",
+ "idna",
+ "sniffio",
+]
+files = [
+ {file = "httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5"},
+ {file = "httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5"},
+]
+
+[[package]]
+name = "idna"
+version = "3.7"
+requires_python = ">=3.5"
+summary = "Internationalized Domain Names in Applications (IDNA)"
+groups = ["default", "docs"]
+files = [
+ {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"},
+ {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"},
+]
+
+[[package]]
+name = "imagesize"
+version = "1.4.1"
+requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+summary = "Getting image size from png/jpeg/jpeg2000/gif file"
+groups = ["docs"]
+files = [
+ {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"},
+ {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"},
+]
+
+[[package]]
+name = "iniconfig"
+version = "2.0.0"
+requires_python = ">=3.7"
+summary = "brain-dead simple config-ini parsing"
+groups = ["dev"]
+files = [
+ {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"},
+ {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
+]
+
+[[package]]
+name = "interface-meta"
+version = "1.3.0"
+requires_python = ">=3.7,<4.0"
+summary = "`interface_meta` provides a convenient way to expose an extensible API with enforced method signatures and consistent documentation."
+groups = ["default"]
+files = [
+ {file = "interface_meta-1.3.0-py3-none-any.whl", hash = "sha256:de35dc5241431886e709e20a14d6597ed07c9f1e8b4bfcffde2190ca5b700ee8"},
+ {file = "interface_meta-1.3.0.tar.gz", hash = "sha256:8a4493f8bdb73fb9655dcd5115bc897e207319e36c8835f39c516a2d7e9d79a1"},
+]
+
+[[package]]
+name = "jaxtyping"
+version = "0.2.33"
+requires_python = "~=3.9"
+summary = "Type annotations and runtime checking for shape and dtype of JAX arrays, and PyTrees."
+groups = ["default"]
+dependencies = [
+ "typeguard==2.13.3",
+]
+files = [
+ {file = "jaxtyping-0.2.33-py3-none-any.whl", hash = "sha256:918d6094c73f28d3196185ef55d1832cbcd2804d1d388f180060c4366a9e2107"},
+ {file = "jaxtyping-0.2.33.tar.gz", hash = "sha256:9a9cfccae4fe05114b9fb27a5ea5440be4971a5a075bbd0526f6dd7d2730f83e"},
+]
+
+[[package]]
+name = "jinja2"
+version = "3.1.4"
+requires_python = ">=3.7"
+summary = "A very fast and expressive template engine."
+groups = ["default", "docs"]
+dependencies = [
+ "MarkupSafe>=2.0",
+]
+files = [
+ {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"},
+ {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"},
+]
+
+[[package]]
+name = "joblib"
+version = "1.4.2"
+requires_python = ">=3.8"
+summary = "Lightweight pipelining with Python functions"
+groups = ["default"]
+files = [
+ {file = "joblib-1.4.2-py3-none-any.whl", hash = "sha256:06d478d5674cbc267e7496a410ee875abd68e4340feff4490bcb7afb88060ae6"},
+ {file = "joblib-1.4.2.tar.gz", hash = "sha256:2382c5816b2636fbd20a09e0f4e9dad4736765fdfb7dca582943b9c1366b3f0e"},
+]
+
+[[package]]
+name = "jsbeautifier"
+version = "1.15.1"
+summary = "JavaScript unobfuscator and beautifier."
+groups = ["default"]
+dependencies = [
+ "editorconfig>=0.12.2",
+ "six>=1.13.0",
+]
+files = [
+ {file = "jsbeautifier-1.15.1.tar.gz", hash = "sha256:ebd733b560704c602d744eafc839db60a1ee9326e30a2a80c4adb8718adc1b24"},
+]
+
+[[package]]
+name = "jsonschema"
+version = "4.23.0"
+requires_python = ">=3.8"
+summary = "An implementation of JSON Schema validation for Python"
+groups = ["default"]
+dependencies = [
+ "attrs>=22.2.0",
+ "importlib-resources>=1.4.0; python_version < \"3.9\"",
+ "jsonschema-specifications>=2023.03.6",
+ "pkgutil-resolve-name>=1.3.10; python_version < \"3.9\"",
+ "referencing>=0.28.4",
+ "rpds-py>=0.7.1",
+]
+files = [
+ {file = "jsonschema-4.23.0-py3-none-any.whl", hash = "sha256:fbadb6f8b144a8f8cf9f0b89ba94501d143e50411a1278633f56a7acf7fd5566"},
+ {file = "jsonschema-4.23.0.tar.gz", hash = "sha256:d71497fef26351a33265337fa77ffeb82423f3ea21283cd9467bb03999266bc4"},
+]
+
+[[package]]
+name = "jsonschema-specifications"
+version = "2023.12.1"
+requires_python = ">=3.8"
+summary = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry"
+groups = ["default"]
+dependencies = [
+ "importlib-resources>=1.4.0; python_version < \"3.9\"",
+ "referencing>=0.31.0",
+]
+files = [
+ {file = "jsonschema_specifications-2023.12.1-py3-none-any.whl", hash = "sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c"},
+ {file = "jsonschema_specifications-2023.12.1.tar.gz", hash = "sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc"},
+]
+
+[[package]]
+name = "linear-operator"
+version = "0.5.2"
+requires_python = ">=3.8"
+summary = "A linear operator implementation, primarily designed for finite-dimensional positive definite operators (i.e. kernel matrices)."
+groups = ["default"]
+dependencies = [
+ "jaxtyping>=0.2.9",
+ "scipy",
+ "torch>=1.11",
+ "typeguard~=2.13.3",
+]
+files = [
+ {file = "linear_operator-0.5.2-py3-none-any.whl", hash = "sha256:26defe85e3c924f24d49117bf78afaf0207f6847877903309dc9bf40a46d08a7"},
+ {file = "linear_operator-0.5.2.tar.gz", hash = "sha256:5cd9099bca5b9f1e57017a4153526df7410561a46aedd47096c8da642159b90d"},
+]
+
+[[package]]
+name = "linkify-it-py"
+version = "2.0.3"
+requires_python = ">=3.7"
+summary = "Links recognition library with FULL unicode support."
+groups = ["default"]
+marker = "sys_platform != \"win32\""
+dependencies = [
+ "uc-micro-py",
+]
+files = [
+ {file = "linkify-it-py-2.0.3.tar.gz", hash = "sha256:68cda27e162e9215c17d786649d1da0021a451bdc436ef9e0fa0ba5234b9b048"},
+ {file = "linkify_it_py-2.0.3-py3-none-any.whl", hash = "sha256:6bcbc417b0ac14323382aef5c5192c0075bf8a9d6b41820a2b66371eac6b6d79"},
+]
+
+[[package]]
+name = "litestar"
+version = "2.11.0"
+requires_python = "<4.0,>=3.8"
+summary = "Litestar - A production-ready, highly performant, extensible ASGI API Framework"
+groups = ["default"]
+dependencies = [
+ "anyio>=3",
+ "click",
+ "exceptiongroup; python_version < \"3.11\"",
+ "httpx>=0.22",
+ "importlib-metadata; python_version < \"3.10\"",
+ "importlib-resources>=5.12.0; python_version < \"3.9\"",
+ "msgspec>=0.18.2",
+ "multidict>=6.0.2",
+ "polyfactory>=2.6.3",
+ "pyyaml",
+ "rich-click",
+ "rich>=13.0.0",
+ "typing-extensions",
+]
+files = [
+ {file = "litestar-2.11.0-py3-none-any.whl", hash = "sha256:6d677ccdc00a0b4ce54cff5172531890358a27d6da1a054c8cab6a7e2119823e"},
+ {file = "litestar-2.11.0.tar.gz", hash = "sha256:6c8cf2b60c352e6b8e08e6a995d2a66ddc26ec53bc2f1df7214d26abcc1d00c2"},
+]
+
+[[package]]
+name = "litestar"
+version = "2.11.0"
+extras = ["standard"]
+requires_python = "<4.0,>=3.8"
+summary = "Litestar - A production-ready, highly performant, extensible ASGI API Framework"
+groups = ["default"]
+dependencies = [
+ "fast-query-parsers>=1.0.2",
+ "jinja2",
+ "jsbeautifier",
+ "litestar==2.11.0",
+ "uvicorn[standard]",
+ "uvloop>=0.18.0; sys_platform != \"win32\"",
+]
+files = [
+ {file = "litestar-2.11.0-py3-none-any.whl", hash = "sha256:6d677ccdc00a0b4ce54cff5172531890358a27d6da1a054c8cab6a7e2119823e"},
+ {file = "litestar-2.11.0.tar.gz", hash = "sha256:6c8cf2b60c352e6b8e08e6a995d2a66ddc26ec53bc2f1df7214d26abcc1d00c2"},
+]
+
+[[package]]
+name = "markdown-it-py"
+version = "3.0.0"
+requires_python = ">=3.8"
+summary = "Python port of markdown-it. Markdown parsing, done right!"
+groups = ["default"]
+dependencies = [
+ "mdurl~=0.1",
+]
+files = [
+ {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"},
+ {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"},
+]
+
+[[package]]
+name = "markdown-it-py"
+version = "3.0.0"
+extras = ["linkify", "plugins"]
+requires_python = ">=3.8"
+summary = "Python port of markdown-it. Markdown parsing, done right!"
+groups = ["default"]
+marker = "sys_platform != \"win32\""
+dependencies = [
+ "linkify-it-py<3,>=1",
+ "markdown-it-py==3.0.0",
+ "mdit-py-plugins",
+]
+files = [
+ {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"},
+ {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"},
+]
+
+[[package]]
+name = "markupsafe"
+version = "2.1.5"
+requires_python = ">=3.7"
+summary = "Safely add untrusted strings to HTML/XML markup."
+groups = ["default", "docs"]
+files = [
+ {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"},
+ {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"},
+ {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"},
+ {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"},
+ {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"},
+ {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"},
+ {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"},
+ {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"},
+ {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"},
+ {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"},
+ {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"},
+ {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"},
+ {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"},
+ {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"},
+ {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"},
+ {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"},
+ {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"},
+ {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"},
+ {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"},
+ {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"},
+ {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"},
+ {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"},
+ {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"},
+ {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"},
+ {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"},
+ {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"},
+ {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"},
+ {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"},
+ {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"},
+ {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"},
+ {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"},
+]
+
+[[package]]
+name = "mdit-py-plugins"
+version = "0.4.1"
+requires_python = ">=3.8"
+summary = "Collection of plugins for markdown-it-py"
+groups = ["default"]
+marker = "sys_platform != \"win32\""
+dependencies = [
+ "markdown-it-py<4.0.0,>=1.0.0",
+]
+files = [
+ {file = "mdit_py_plugins-0.4.1-py3-none-any.whl", hash = "sha256:1020dfe4e6bfc2c79fb49ae4e3f5b297f5ccd20f010187acc52af2921e27dc6a"},
+ {file = "mdit_py_plugins-0.4.1.tar.gz", hash = "sha256:834b8ac23d1cd60cec703646ffd22ae97b7955a6d596eb1d304be1e251ae499c"},
+]
+
+[[package]]
+name = "mdurl"
+version = "0.1.2"
+requires_python = ">=3.7"
+summary = "Markdown URL utilities"
+groups = ["default"]
+files = [
+ {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"},
+ {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"},
+]
+
+[[package]]
+name = "memray"
+version = "1.13.4"
+requires_python = ">=3.7.0"
+summary = "A memory profiler for Python applications"
+groups = ["default"]
+marker = "sys_platform != \"win32\""
+dependencies = [
+ "jinja2>=2.9",
+ "rich>=11.2.0",
+ "textual>=0.41.0",
+ "typing-extensions; python_version < \"3.8.0\"",
+]
+files = [
+ {file = "memray-1.13.4-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:ed0bfcffbd857cbf78a4db942019e9e153019b754048b0522065844d1c538e8c"},
+ {file = "memray-1.13.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fcf71802b2c6d68c5336b1e4ae341eab64dcccd0dcf67687af53f18bc020237b"},
+ {file = "memray-1.13.4-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6c9ae675131492bdfafcc44e86d0b81401ea8d052a9cab7793b1dab642cd58e6"},
+ {file = "memray-1.13.4-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:bac9d30ce39aaee40601087d09c1639a071293f414b5e726a152ed3581d25e50"},
+ {file = "memray-1.13.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a437c7e28734028a2f43f942c3146e9737033718cea092ea910f6de3cf46221d"},
+ {file = "memray-1.13.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3cae161d5b6769cc3af574cfa0c7ea77f98d6ae714ba5ec508f6f05b84800801"},
+ {file = "memray-1.13.4-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:bf407123e175de4f5a7264886eb64ea514f4b388b617f05dfcd857d99ecadd1c"},
+ {file = "memray-1.13.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a6f1bd3d0adf84f864e24f74552c1533224e64283dfee33641011acf384fc138"},
+ {file = "memray-1.13.4-cp311-cp311-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ba5bb9a3b7c3c08752f3b55a3b5b360963c9f666e2220eb388ab6f7d1271d843"},
+ {file = "memray-1.13.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1e8cec70e51e81c0e9448e62a5366914b74a3dbb60826cdec8f0e7559e58e74"},
+ {file = "memray-1.13.4-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:81497e578017feb57a46e19c349450888e57ff7fb8f0f5134d3e07605c435500"},
+ {file = "memray-1.13.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e585d866c82ce92060fa1c925298aa8b89936ca22df9698a25a5f0cf7ca81fa2"},
+ {file = "memray-1.13.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3d048da01dc138711a2c9c70ba693d186690c98fb0ca26fdc3483486d4849238"},
+ {file = "memray-1.13.4-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:b6459761046ab46638d2c62d7f3f55eaaf45a947bd1d36dcfb5e860047280557"},
+ {file = "memray-1.13.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:637651f5ca2870e9156f189c337e8c6d0002e3f6f7d44d6486ff5baf12a6115e"},
+ {file = "memray-1.13.4-cp312-cp312-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d5b9e10fde6f652ea176cbc0d4d4c563d2831faec4434d3e03c4c0aff8ddc6c0"},
+ {file = "memray-1.13.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1f3ab803b703b9be29259039caf43803ad5abf37f04e77cd9e8373054dd91f6"},
+ {file = "memray-1.13.4-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bfdc070da2df9241f78b7429d44f6ee16e924d43eddc587f6ed7218c4cb792d3"},
+ {file = "memray-1.13.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:523a63dee71cd4d55eddca866244a045e7549ca5137ec906c62893b87a2161ce"},
+ {file = "memray-1.13.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3bf06f8883a26b779cc828addad97a2d39d7587263e348655dae3ec90b6ee079"},
+ {file = "memray-1.13.4.tar.gz", hash = "sha256:48f8f9b89b3a84028668244151eb7248189fb3f4f2a761ec1211439adcbb2ad1"},
+]
+
+[[package]]
+name = "minio"
+version = "7.2.8"
+requires_python = ">3.8"
+summary = "MinIO Python SDK for Amazon S3 Compatible Cloud Storage"
+groups = ["default"]
+dependencies = [
+ "argon2-cffi",
+ "certifi",
+ "pycryptodome",
+ "typing-extensions",
+ "urllib3",
+]
+files = [
+ {file = "minio-7.2.8-py3-none-any.whl", hash = "sha256:aa3b485788b63b12406a5798465d12a57e4be2ac2a58a8380959b6b748e64ddd"},
+ {file = "minio-7.2.8.tar.gz", hash = "sha256:f8af2dafc22ebe1aef3ac181b8e217037011c430aa6da276ed627e55aaf7c815"},
+]
+
+[[package]]
+name = "mpmath"
+version = "1.3.0"
+summary = "Python library for arbitrary-precision floating-point arithmetic"
+groups = ["default"]
+files = [
+ {file = "mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c"},
+ {file = "mpmath-1.3.0.tar.gz", hash = "sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f"},
+]
+
+[[package]]
+name = "msgpack"
+version = "1.0.8"
+requires_python = ">=3.8"
+summary = "MessagePack serializer"
+groups = ["default"]
+files = [
+ {file = "msgpack-1.0.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:505fe3d03856ac7d215dbe005414bc28505d26f0c128906037e66d98c4e95868"},
+ {file = "msgpack-1.0.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6b7842518a63a9f17107eb176320960ec095a8ee3b4420b5f688e24bf50c53c"},
+ {file = "msgpack-1.0.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:376081f471a2ef24828b83a641a02c575d6103a3ad7fd7dade5486cad10ea659"},
+ {file = "msgpack-1.0.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e390971d082dba073c05dbd56322427d3280b7cc8b53484c9377adfbae67dc2"},
+ {file = "msgpack-1.0.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00e073efcba9ea99db5acef3959efa45b52bc67b61b00823d2a1a6944bf45982"},
+ {file = "msgpack-1.0.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82d92c773fbc6942a7a8b520d22c11cfc8fd83bba86116bfcf962c2f5c2ecdaa"},
+ {file = "msgpack-1.0.8-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9ee32dcb8e531adae1f1ca568822e9b3a738369b3b686d1477cbc643c4a9c128"},
+ {file = "msgpack-1.0.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e3aa7e51d738e0ec0afbed661261513b38b3014754c9459508399baf14ae0c9d"},
+ {file = "msgpack-1.0.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:69284049d07fce531c17404fcba2bb1df472bc2dcdac642ae71a2d079d950653"},
+ {file = "msgpack-1.0.8-cp310-cp310-win32.whl", hash = "sha256:13577ec9e247f8741c84d06b9ece5f654920d8365a4b636ce0e44f15e07ec693"},
+ {file = "msgpack-1.0.8-cp310-cp310-win_amd64.whl", hash = "sha256:e532dbd6ddfe13946de050d7474e3f5fb6ec774fbb1a188aaf469b08cf04189a"},
+ {file = "msgpack-1.0.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9517004e21664f2b5a5fd6333b0731b9cf0817403a941b393d89a2f1dc2bd836"},
+ {file = "msgpack-1.0.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d16a786905034e7e34098634b184a7d81f91d4c3d246edc6bd7aefb2fd8ea6ad"},
+ {file = "msgpack-1.0.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e2872993e209f7ed04d963e4b4fbae72d034844ec66bc4ca403329db2074377b"},
+ {file = "msgpack-1.0.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c330eace3dd100bdb54b5653b966de7f51c26ec4a7d4e87132d9b4f738220ba"},
+ {file = "msgpack-1.0.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83b5c044f3eff2a6534768ccfd50425939e7a8b5cf9a7261c385de1e20dcfc85"},
+ {file = "msgpack-1.0.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1876b0b653a808fcd50123b953af170c535027bf1d053b59790eebb0aeb38950"},
+ {file = "msgpack-1.0.8-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:dfe1f0f0ed5785c187144c46a292b8c34c1295c01da12e10ccddfc16def4448a"},
+ {file = "msgpack-1.0.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3528807cbbb7f315bb81959d5961855e7ba52aa60a3097151cb21956fbc7502b"},
+ {file = "msgpack-1.0.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e2f879ab92ce502a1e65fce390eab619774dda6a6ff719718069ac94084098ce"},
+ {file = "msgpack-1.0.8-cp311-cp311-win32.whl", hash = "sha256:26ee97a8261e6e35885c2ecd2fd4a6d38252246f94a2aec23665a4e66d066305"},
+ {file = "msgpack-1.0.8-cp311-cp311-win_amd64.whl", hash = "sha256:eadb9f826c138e6cf3c49d6f8de88225a3c0ab181a9b4ba792e006e5292d150e"},
+ {file = "msgpack-1.0.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:114be227f5213ef8b215c22dde19532f5da9652e56e8ce969bf0a26d7c419fee"},
+ {file = "msgpack-1.0.8-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d661dc4785affa9d0edfdd1e59ec056a58b3dbb9f196fa43587f3ddac654ac7b"},
+ {file = "msgpack-1.0.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d56fd9f1f1cdc8227d7b7918f55091349741904d9520c65f0139a9755952c9e8"},
+ {file = "msgpack-1.0.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0726c282d188e204281ebd8de31724b7d749adebc086873a59efb8cf7ae27df3"},
+ {file = "msgpack-1.0.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8db8e423192303ed77cff4dce3a4b88dbfaf43979d280181558af5e2c3c71afc"},
+ {file = "msgpack-1.0.8-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99881222f4a8c2f641f25703963a5cefb076adffd959e0558dc9f803a52d6a58"},
+ {file = "msgpack-1.0.8-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b5505774ea2a73a86ea176e8a9a4a7c8bf5d521050f0f6f8426afe798689243f"},
+ {file = "msgpack-1.0.8-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:ef254a06bcea461e65ff0373d8a0dd1ed3aa004af48839f002a0c994a6f72d04"},
+ {file = "msgpack-1.0.8-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e1dd7839443592d00e96db831eddb4111a2a81a46b028f0facd60a09ebbdd543"},
+ {file = "msgpack-1.0.8-cp312-cp312-win32.whl", hash = "sha256:64d0fcd436c5683fdd7c907eeae5e2cbb5eb872fafbc03a43609d7941840995c"},
+ {file = "msgpack-1.0.8-cp312-cp312-win_amd64.whl", hash = "sha256:74398a4cf19de42e1498368c36eed45d9528f5fd0155241e82c4082b7e16cffd"},
+ {file = "msgpack-1.0.8.tar.gz", hash = "sha256:95c02b0e27e706e48d0e5426d1710ca78e0f0628d6e89d5b5a5b91a5f12274f3"},
+]
+
+[[package]]
+name = "msgspec"
+version = "0.18.6"
+requires_python = ">=3.8"
+summary = "A fast serialization and validation library, with builtin support for JSON, MessagePack, YAML, and TOML."
+groups = ["default"]
+files = [
+ {file = "msgspec-0.18.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:77f30b0234eceeff0f651119b9821ce80949b4d667ad38f3bfed0d0ebf9d6d8f"},
+ {file = "msgspec-0.18.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1a76b60e501b3932782a9da039bd1cd552b7d8dec54ce38332b87136c64852dd"},
+ {file = "msgspec-0.18.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06acbd6edf175bee0e36295d6b0302c6de3aaf61246b46f9549ca0041a9d7177"},
+ {file = "msgspec-0.18.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40a4df891676d9c28a67c2cc39947c33de516335680d1316a89e8f7218660410"},
+ {file = "msgspec-0.18.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a6896f4cd5b4b7d688018805520769a8446df911eb93b421c6c68155cdf9dd5a"},
+ {file = "msgspec-0.18.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3ac4dd63fd5309dd42a8c8c36c1563531069152be7819518be0a9d03be9788e4"},
+ {file = "msgspec-0.18.6-cp310-cp310-win_amd64.whl", hash = "sha256:fda4c357145cf0b760000c4ad597e19b53adf01382b711f281720a10a0fe72b7"},
+ {file = "msgspec-0.18.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e77e56ffe2701e83a96e35770c6adb655ffc074d530018d1b584a8e635b4f36f"},
+ {file = "msgspec-0.18.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d5351afb216b743df4b6b147691523697ff3a2fc5f3d54f771e91219f5c23aaa"},
+ {file = "msgspec-0.18.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3232fabacef86fe8323cecbe99abbc5c02f7698e3f5f2e248e3480b66a3596b"},
+ {file = "msgspec-0.18.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3b524df6ea9998bbc99ea6ee4d0276a101bcc1aa8d14887bb823914d9f60d07"},
+ {file = "msgspec-0.18.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:37f67c1d81272131895bb20d388dd8d341390acd0e192a55ab02d4d6468b434c"},
+ {file = "msgspec-0.18.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d0feb7a03d971c1c0353de1a8fe30bb6579c2dc5ccf29b5f7c7ab01172010492"},
+ {file = "msgspec-0.18.6-cp311-cp311-win_amd64.whl", hash = "sha256:41cf758d3f40428c235c0f27bc6f322d43063bc32da7b9643e3f805c21ed57b4"},
+ {file = "msgspec-0.18.6-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d86f5071fe33e19500920333c11e2267a31942d18fed4d9de5bc2fbab267d28c"},
+ {file = "msgspec-0.18.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce13981bfa06f5eb126a3a5a38b1976bddb49a36e4f46d8e6edecf33ccf11df1"},
+ {file = "msgspec-0.18.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e97dec6932ad5e3ee1e3c14718638ba333befc45e0661caa57033cd4cc489466"},
+ {file = "msgspec-0.18.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad237100393f637b297926cae1868b0d500f764ccd2f0623a380e2bcfb2809ca"},
+ {file = "msgspec-0.18.6-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db1d8626748fa5d29bbd15da58b2d73af25b10aa98abf85aab8028119188ed57"},
+ {file = "msgspec-0.18.6-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:d70cb3d00d9f4de14d0b31d38dfe60c88ae16f3182988246a9861259c6722af6"},
+ {file = "msgspec-0.18.6-cp312-cp312-win_amd64.whl", hash = "sha256:1003c20bfe9c6114cc16ea5db9c5466e49fae3d7f5e2e59cb70693190ad34da0"},
+ {file = "msgspec-0.18.6.tar.gz", hash = "sha256:a59fc3b4fcdb972d09138cb516dbde600c99d07c38fd9372a6ef500d2d031b4e"},
+]
+
+[[package]]
+name = "multidict"
+version = "6.0.5"
+requires_python = ">=3.7"
+summary = "multidict implementation"
+groups = ["default"]
+files = [
+ {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"},
+ {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"},
+ {file = "multidict-6.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600"},
+ {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c"},
+ {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5"},
+ {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f"},
+ {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae"},
+ {file = "multidict-6.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182"},
+ {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf"},
+ {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442"},
+ {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a"},
+ {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef"},
+ {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc"},
+ {file = "multidict-6.0.5-cp310-cp310-win32.whl", hash = "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319"},
+ {file = "multidict-6.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8"},
+ {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba"},
+ {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e"},
+ {file = "multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd"},
+ {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3"},
+ {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf"},
+ {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29"},
+ {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed"},
+ {file = "multidict-6.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733"},
+ {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f"},
+ {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4"},
+ {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1"},
+ {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc"},
+ {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e"},
+ {file = "multidict-6.0.5-cp311-cp311-win32.whl", hash = "sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c"},
+ {file = "multidict-6.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea"},
+ {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:51d035609b86722963404f711db441cf7134f1889107fb171a970c9701f92e1e"},
+ {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cbebcd5bcaf1eaf302617c114aa67569dd3f090dd0ce8ba9e35e9985b41ac35b"},
+ {file = "multidict-6.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ffc42c922dbfddb4a4c3b438eb056828719f07608af27d163191cb3e3aa6cc5"},
+ {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ceb3b7e6a0135e092de86110c5a74e46bda4bd4fbfeeb3a3bcec79c0f861e450"},
+ {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79660376075cfd4b2c80f295528aa6beb2058fd289f4c9252f986751a4cd0496"},
+ {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4428b29611e989719874670fd152b6625500ad6c686d464e99f5aaeeaca175a"},
+ {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d84a5c3a5f7ce6db1f999fb9438f686bc2e09d38143f2d93d8406ed2dd6b9226"},
+ {file = "multidict-6.0.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0de87358b192de7ea9649beb392f107dcad9ad27276324c24c91774ca5271"},
+ {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:79a6d2ba910adb2cbafc95dad936f8b9386e77c84c35bc0add315b856d7c3abb"},
+ {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:92d16a3e275e38293623ebf639c471d3e03bb20b8ebb845237e0d3664914caef"},
+ {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb616be3538599e797a2017cccca78e354c767165e8858ab5116813146041a24"},
+ {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:14c2976aa9038c2629efa2c148022ed5eb4cb939e15ec7aace7ca932f48f9ba6"},
+ {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:435a0984199d81ca178b9ae2c26ec3d49692d20ee29bc4c11a2a8d4514c67eda"},
+ {file = "multidict-6.0.5-cp312-cp312-win32.whl", hash = "sha256:9fe7b0653ba3d9d65cbe7698cca585bf0f8c83dbbcc710db9c90f478e175f2d5"},
+ {file = "multidict-6.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556"},
+ {file = "multidict-6.0.5-py3-none-any.whl", hash = "sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7"},
+ {file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"},
+]
+
+[[package]]
+name = "multipledispatch"
+version = "1.0.0"
+summary = "Multiple dispatch"
+groups = ["default"]
+files = [
+ {file = "multipledispatch-1.0.0-py3-none-any.whl", hash = "sha256:0c53cd8b077546da4e48869f49b13164bebafd0c2a5afceb6bb6a316e7fb46e4"},
+ {file = "multipledispatch-1.0.0.tar.gz", hash = "sha256:5c839915465c68206c3e9c473357908216c28383b425361e5d144594bf85a7e0"},
+]
+
+[[package]]
+name = "multiprocess"
+version = "0.70.16"
+requires_python = ">=3.8"
+summary = "better multiprocessing and multithreading in Python"
+groups = ["default"]
+dependencies = [
+ "dill>=0.3.8",
+]
+files = [
+ {file = "multiprocess-0.70.16-pp310-pypy310_pp73-macosx_10_13_x86_64.whl", hash = "sha256:476887be10e2f59ff183c006af746cb6f1fd0eadcfd4ef49e605cbe2659920ee"},
+ {file = "multiprocess-0.70.16-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d951bed82c8f73929ac82c61f01a7b5ce8f3e5ef40f5b52553b4f547ce2b08ec"},
+ {file = "multiprocess-0.70.16-py310-none-any.whl", hash = "sha256:c4a9944c67bd49f823687463660a2d6daae94c289adff97e0f9d696ba6371d02"},
+ {file = "multiprocess-0.70.16-py311-none-any.whl", hash = "sha256:af4cabb0dac72abfb1e794fa7855c325fd2b55a10a44628a3c1ad3311c04127a"},
+ {file = "multiprocess-0.70.16-py312-none-any.whl", hash = "sha256:fc0544c531920dde3b00c29863377f87e1632601092ea2daca74e4beb40faa2e"},
+ {file = "multiprocess-0.70.16.tar.gz", hash = "sha256:161af703d4652a0e1410be6abccecde4a7ddffd19341be0a7011b94aeb171ac1"},
+]
+
+[[package]]
+name = "mypy-extensions"
+version = "1.0.0"
+requires_python = ">=3.5"
+summary = "Type system extensions for programs checked with the mypy type checker."
+groups = ["dev"]
+files = [
+ {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"},
+ {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
+]
+
+[[package]]
+name = "networkx"
+version = "3.3"
+requires_python = ">=3.10"
+summary = "Python package for creating and manipulating graphs and networks"
+groups = ["default"]
+files = [
+ {file = "networkx-3.3-py3-none-any.whl", hash = "sha256:28575580c6ebdaf4505b22c6256a2b9de86b316dc63ba9e93abde3d78dfdbcf2"},
+ {file = "networkx-3.3.tar.gz", hash = "sha256:0c127d8b2f4865f59ae9cb8aafcd60b5c70f3241ebd66f7defad7c4ab90126c9"},
+]
+
+[[package]]
+name = "numpy"
+version = "1.26.4"
+requires_python = ">=3.9"
+summary = "Fundamental package for array computing in Python"
+groups = ["default"]
+files = [
+ {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"},
+ {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"},
+ {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"},
+ {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"},
+ {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"},
+ {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"},
+ {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"},
+ {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"},
+ {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"},
+ {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"},
+ {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"},
+ {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"},
+ {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"},
+ {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"},
+ {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"},
+ {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"},
+ {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"},
+ {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"},
+ {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"},
+ {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"},
+ {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"},
+ {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"},
+ {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"},
+ {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"},
+ {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"},
+]
+
+[[package]]
+name = "nvidia-cublas-cu12"
+version = "12.1.3.1"
+requires_python = ">=3"
+summary = "CUBLAS native runtime libraries"
+groups = ["default"]
+marker = "platform_system == \"Linux\" and platform_machine == \"x86_64\""
+files = [
+ {file = "nvidia_cublas_cu12-12.1.3.1-py3-none-manylinux1_x86_64.whl", hash = "sha256:ee53ccca76a6fc08fb9701aa95b6ceb242cdaab118c3bb152af4e579af792728"},
+ {file = "nvidia_cublas_cu12-12.1.3.1-py3-none-win_amd64.whl", hash = "sha256:2b964d60e8cf11b5e1073d179d85fa340c120e99b3067558f3cf98dd69d02906"},
+]
+
+[[package]]
+name = "nvidia-cuda-cupti-cu12"
+version = "12.1.105"
+requires_python = ">=3"
+summary = "CUDA profiling tools runtime libs."
+groups = ["default"]
+marker = "platform_system == \"Linux\" and platform_machine == \"x86_64\""
+files = [
+ {file = "nvidia_cuda_cupti_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:e54fde3983165c624cb79254ae9818a456eb6e87a7fd4d56a2352c24ee542d7e"},
+ {file = "nvidia_cuda_cupti_cu12-12.1.105-py3-none-win_amd64.whl", hash = "sha256:bea8236d13a0ac7190bd2919c3e8e6ce1e402104276e6f9694479e48bb0eb2a4"},
+]
+
+[[package]]
+name = "nvidia-cuda-nvrtc-cu12"
+version = "12.1.105"
+requires_python = ">=3"
+summary = "NVRTC native runtime libraries"
+groups = ["default"]
+marker = "platform_system == \"Linux\" and platform_machine == \"x86_64\""
+files = [
+ {file = "nvidia_cuda_nvrtc_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:339b385f50c309763ca65456ec75e17bbefcbbf2893f462cb8b90584cd27a1c2"},
+ {file = "nvidia_cuda_nvrtc_cu12-12.1.105-py3-none-win_amd64.whl", hash = "sha256:0a98a522d9ff138b96c010a65e145dc1b4850e9ecb75a0172371793752fd46ed"},
+]
+
+[[package]]
+name = "nvidia-cuda-runtime-cu12"
+version = "12.1.105"
+requires_python = ">=3"
+summary = "CUDA Runtime native Libraries"
+groups = ["default"]
+marker = "platform_system == \"Linux\" and platform_machine == \"x86_64\""
+files = [
+ {file = "nvidia_cuda_runtime_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:6e258468ddf5796e25f1dc591a31029fa317d97a0a94ed93468fc86301d61e40"},
+ {file = "nvidia_cuda_runtime_cu12-12.1.105-py3-none-win_amd64.whl", hash = "sha256:dfb46ef84d73fababab44cf03e3b83f80700d27ca300e537f85f636fac474344"},
+]
+
+[[package]]
+name = "nvidia-cudnn-cu12"
+version = "9.1.0.70"
+requires_python = ">=3"
+summary = "cuDNN runtime libraries"
+groups = ["default"]
+marker = "platform_system == \"Linux\" and platform_machine == \"x86_64\""
+dependencies = [
+ "nvidia-cublas-cu12",
+]
+files = [
+ {file = "nvidia_cudnn_cu12-9.1.0.70-py3-none-manylinux2014_x86_64.whl", hash = "sha256:165764f44ef8c61fcdfdfdbe769d687e06374059fbb388b6c89ecb0e28793a6f"},
+ {file = "nvidia_cudnn_cu12-9.1.0.70-py3-none-win_amd64.whl", hash = "sha256:6278562929433d68365a07a4a1546c237ba2849852c0d4b2262a486e805b977a"},
+]
+
+[[package]]
+name = "nvidia-cufft-cu12"
+version = "11.0.2.54"
+requires_python = ">=3"
+summary = "CUFFT native runtime libraries"
+groups = ["default"]
+marker = "platform_system == \"Linux\" and platform_machine == \"x86_64\""
+files = [
+ {file = "nvidia_cufft_cu12-11.0.2.54-py3-none-manylinux1_x86_64.whl", hash = "sha256:794e3948a1aa71fd817c3775866943936774d1c14e7628c74f6f7417224cdf56"},
+ {file = "nvidia_cufft_cu12-11.0.2.54-py3-none-win_amd64.whl", hash = "sha256:d9ac353f78ff89951da4af698f80870b1534ed69993f10a4cf1d96f21357e253"},
+]
+
+[[package]]
+name = "nvidia-curand-cu12"
+version = "10.3.2.106"
+requires_python = ">=3"
+summary = "CURAND native runtime libraries"
+groups = ["default"]
+marker = "platform_system == \"Linux\" and platform_machine == \"x86_64\""
+files = [
+ {file = "nvidia_curand_cu12-10.3.2.106-py3-none-manylinux1_x86_64.whl", hash = "sha256:9d264c5036dde4e64f1de8c50ae753237c12e0b1348738169cd0f8a536c0e1e0"},
+ {file = "nvidia_curand_cu12-10.3.2.106-py3-none-win_amd64.whl", hash = "sha256:75b6b0c574c0037839121317e17fd01f8a69fd2ef8e25853d826fec30bdba74a"},
+]
+
+[[package]]
+name = "nvidia-cusolver-cu12"
+version = "11.4.5.107"
+requires_python = ">=3"
+summary = "CUDA solver native runtime libraries"
+groups = ["default"]
+marker = "platform_system == \"Linux\" and platform_machine == \"x86_64\""
+dependencies = [
+ "nvidia-cublas-cu12",
+ "nvidia-cusparse-cu12",
+ "nvidia-nvjitlink-cu12",
+]
+files = [
+ {file = "nvidia_cusolver_cu12-11.4.5.107-py3-none-manylinux1_x86_64.whl", hash = "sha256:8a7ec542f0412294b15072fa7dab71d31334014a69f953004ea7a118206fe0dd"},
+ {file = "nvidia_cusolver_cu12-11.4.5.107-py3-none-win_amd64.whl", hash = "sha256:74e0c3a24c78612192a74fcd90dd117f1cf21dea4822e66d89e8ea80e3cd2da5"},
+]
+
+[[package]]
+name = "nvidia-cusparse-cu12"
+version = "12.1.0.106"
+requires_python = ">=3"
+summary = "CUSPARSE native runtime libraries"
+groups = ["default"]
+marker = "platform_system == \"Linux\" and platform_machine == \"x86_64\""
+dependencies = [
+ "nvidia-nvjitlink-cu12",
+]
+files = [
+ {file = "nvidia_cusparse_cu12-12.1.0.106-py3-none-manylinux1_x86_64.whl", hash = "sha256:f3b50f42cf363f86ab21f720998517a659a48131e8d538dc02f8768237bd884c"},
+ {file = "nvidia_cusparse_cu12-12.1.0.106-py3-none-win_amd64.whl", hash = "sha256:b798237e81b9719373e8fae8d4f091b70a0cf09d9d85c95a557e11df2d8e9a5a"},
+]
+
+[[package]]
+name = "nvidia-nccl-cu12"
+version = "2.20.5"
+requires_python = ">=3"
+summary = "NVIDIA Collective Communication Library (NCCL) Runtime"
+groups = ["default"]
+marker = "platform_system == \"Linux\" and platform_machine == \"x86_64\""
+files = [
+ {file = "nvidia_nccl_cu12-2.20.5-py3-none-manylinux2014_aarch64.whl", hash = "sha256:1fc150d5c3250b170b29410ba682384b14581db722b2531b0d8d33c595f33d01"},
+ {file = "nvidia_nccl_cu12-2.20.5-py3-none-manylinux2014_x86_64.whl", hash = "sha256:057f6bf9685f75215d0c53bf3ac4a10b3e6578351de307abad9e18a99182af56"},
+]
+
+[[package]]
+name = "nvidia-nvjitlink-cu12"
+version = "12.6.20"
+requires_python = ">=3"
+summary = "Nvidia JIT LTO Library"
+groups = ["default"]
+marker = "platform_system == \"Linux\" and platform_machine == \"x86_64\""
+files = [
+ {file = "nvidia_nvjitlink_cu12-12.6.20-py3-none-manylinux2014_aarch64.whl", hash = "sha256:84fb38465a5bc7c70cbc320cfd0963eb302ee25a5e939e9f512bbba55b6072fb"},
+ {file = "nvidia_nvjitlink_cu12-12.6.20-py3-none-manylinux2014_x86_64.whl", hash = "sha256:562ab97ea2c23164823b2a89cb328d01d45cb99634b8c65fe7cd60d14562bd79"},
+ {file = "nvidia_nvjitlink_cu12-12.6.20-py3-none-win_amd64.whl", hash = "sha256:ed3c43a17f37b0c922a919203d2d36cbef24d41cc3e6b625182f8b58203644f6"},
+]
+
+[[package]]
+name = "nvidia-nvtx-cu12"
+version = "12.1.105"
+requires_python = ">=3"
+summary = "NVIDIA Tools Extension"
+groups = ["default"]
+marker = "platform_system == \"Linux\" and platform_machine == \"x86_64\""
+files = [
+ {file = "nvidia_nvtx_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:dc21cf308ca5691e7c04d962e213f8a4aa9bbfa23d95412f452254c2caeb09e5"},
+ {file = "nvidia_nvtx_cu12-12.1.105-py3-none-win_amd64.whl", hash = "sha256:65f4d98982b31b60026e0e6de73fbdfc09d08a96f4656dd3665ca616a11e1e82"},
+]
+
+[[package]]
+name = "omegaconf"
+version = "2.3.0"
+requires_python = ">=3.6"
+summary = "A flexible configuration library"
+groups = ["default"]
+dependencies = [
+ "PyYAML>=5.1.0",
+ "antlr4-python3-runtime==4.9.*",
+ "dataclasses; python_version == \"3.6\"",
+]
+files = [
+ {file = "omegaconf-2.3.0-py3-none-any.whl", hash = "sha256:7b4df175cdb08ba400f45cae3bdcae7ba8365db4d165fc65fd04b050ab63b46b"},
+ {file = "omegaconf-2.3.0.tar.gz", hash = "sha256:d5d4b6d29955cc50ad50c46dc269bcd92c6e00f5f90d23ab5fee7bfca4ba4cc7"},
+]
+
+[[package]]
+name = "opencensus"
+version = "0.11.4"
+summary = "A stats collection and distributed tracing framework"
+groups = ["default"]
+dependencies = [
+ "google-api-core<2.0.0,>=1.0.0; python_version < \"3.6\"",
+ "google-api-core<3.0.0,>=1.0.0; python_version >= \"3.6\"",
+ "opencensus-context>=0.1.3",
+ "six~=1.16",
+]
+files = [
+ {file = "opencensus-0.11.4-py2.py3-none-any.whl", hash = "sha256:a18487ce68bc19900336e0ff4655c5a116daf10c1b3685ece8d971bddad6a864"},
+ {file = "opencensus-0.11.4.tar.gz", hash = "sha256:cbef87d8b8773064ab60e5c2a1ced58bbaa38a6d052c41aec224958ce544eff2"},
+]
+
+[[package]]
+name = "opencensus-context"
+version = "0.1.3"
+summary = "OpenCensus Runtime Context"
+groups = ["default"]
+dependencies = [
+ "contextvars; python_version >= \"3.6\" and python_version < \"3.7\"",
+]
+files = [
+ {file = "opencensus-context-0.1.3.tar.gz", hash = "sha256:a03108c3c10d8c80bb5ddf5c8a1f033161fa61972a9917f9b9b3a18517f0088c"},
+ {file = "opencensus_context-0.1.3-py2.py3-none-any.whl", hash = "sha256:073bb0590007af276853009fac7e4bab1d523c3f03baf4cb4511ca38967c6039"},
+]
+
+[[package]]
+name = "opt-einsum"
+version = "3.3.0"
+requires_python = ">=3.5"
+summary = "Optimizing numpys einsum function"
+groups = ["default"]
+dependencies = [
+ "numpy>=1.7",
+]
+files = [
+ {file = "opt_einsum-3.3.0-py3-none-any.whl", hash = "sha256:2455e59e3947d3c275477df7f5205b30635e266fe6dc300e3d9f9646bfcea147"},
+ {file = "opt_einsum-3.3.0.tar.gz", hash = "sha256:59f6475f77bbc37dcf7cd748519c0ec60722e91e63ca114e68821c0c54a46549"},
+]
+
+[[package]]
+name = "osqp"
+version = "0.6.7.post1"
+summary = "OSQP: The Operator Splitting QP Solver"
+groups = ["default"]
+dependencies = [
+ "numpy>=1.7",
+ "qdldl",
+ "scipy>=0.13.2",
+]
+files = [
+ {file = "osqp-0.6.7.post1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:14d221c049c2f1495a91d6683a3b0319f23d0c3e81b3aa5102e4b377ca002980"},
+ {file = "osqp-0.6.7.post1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b0bd72d95d6b97ab8273cdd08c1304dfeb6071e038a0b2d34fa2aebd16cfbec5"},
+ {file = "osqp-0.6.7.post1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4980f2ad0814898396a3ea522f46d199a3412bd3b191065d4ba6837e9cc4c1"},
+ {file = "osqp-0.6.7.post1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e99469b7986f9042925d3082cd6d02cdf012a32483603b64a713f0275de413bb"},
+ {file = "osqp-0.6.7.post1-cp310-cp310-win_amd64.whl", hash = "sha256:ab6e42c8af7c82f5b4b4b989a623151dca98e7bd6c131454edc8cf5cde2b3aa9"},
+ {file = "osqp-0.6.7.post1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d01d6b03628b851107671d7c785df147acea6865f090290a04e38ed250d8b829"},
+ {file = "osqp-0.6.7.post1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7082c852edc9afc63ba7b073bb2e559093b4df4eb24efff7b2f898241a83071c"},
+ {file = "osqp-0.6.7.post1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9f104c9710d8e51cded15ac9b2b9bc77bc265e70c891c671c1935e4b85b0810"},
+ {file = "osqp-0.6.7.post1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44ae75ed6a5c6aba415b8d11963cec2b9ac4d7f1897067e9e095b60e81136022"},
+ {file = "osqp-0.6.7.post1-cp311-cp311-win_amd64.whl", hash = "sha256:117c30affdab60f5872d758c5ad82f5deb029b4fa84fea54bd04b8e7d884c5f6"},
+ {file = "osqp-0.6.7.post1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ed3c98cb31296368a72145875ceab3fb3e3497fdb820a185bf6b9ee39a3d5762"},
+ {file = "osqp-0.6.7.post1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:98fdee4065b9f65c37f63532ba8e11e7efddce3eb9a8b62961bf1a9b62105e0a"},
+ {file = "osqp-0.6.7.post1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20bf96fbf7d51abb95ab75e20508e37b1217cb467fc9cc9f73a584fbf1d5fc88"},
+ {file = "osqp-0.6.7.post1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5a41a9765be0ef1817ebfdcff10018f792e5452a5af3f2864ea44d7cb22e663"},
+ {file = "osqp-0.6.7.post1-cp312-cp312-win_amd64.whl", hash = "sha256:fd56c7e82a6af11d96a549bb07d224359bcd148d4aae9180b8944d20eecd461b"},
+ {file = "osqp-0.6.7.post1.tar.gz", hash = "sha256:554aa10dca8481978b4d334e28201f24ed18f294c5a84350ce2022a8b78f4d72"},
+]
+
+[[package]]
+name = "packaging"
+version = "24.1"
+requires_python = ">=3.8"
+summary = "Core utilities for Python packages"
+groups = ["default", "dev", "docs"]
+files = [
+ {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"},
+ {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"},
+]
+
+[[package]]
+name = "pandas"
+version = "2.2.2"
+requires_python = ">=3.9"
+summary = "Powerful data structures for data analysis, time series, and statistics"
+groups = ["default"]
+dependencies = [
+ "numpy>=1.22.4; python_version < \"3.11\"",
+ "numpy>=1.23.2; python_version == \"3.11\"",
+ "numpy>=1.26.0; python_version >= \"3.12\"",
+ "python-dateutil>=2.8.2",
+ "pytz>=2020.1",
+ "tzdata>=2022.7",
+]
+files = [
+ {file = "pandas-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90c6fca2acf139569e74e8781709dccb6fe25940488755716d1d354d6bc58bce"},
+ {file = "pandas-2.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7adfc142dac335d8c1e0dcbd37eb8617eac386596eb9e1a1b77791cf2498238"},
+ {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4abfe0be0d7221be4f12552995e58723c7422c80a659da13ca382697de830c08"},
+ {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8635c16bf3d99040fdf3ca3db669a7250ddf49c55dc4aa8fe0ae0fa8d6dcc1f0"},
+ {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:40ae1dffb3967a52203105a077415a86044a2bea011b5f321c6aa64b379a3f51"},
+ {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8e5a0b00e1e56a842f922e7fae8ae4077aee4af0acb5ae3622bd4b4c30aedf99"},
+ {file = "pandas-2.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:ddf818e4e6c7c6f4f7c8a12709696d193976b591cc7dc50588d3d1a6b5dc8772"},
+ {file = "pandas-2.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:696039430f7a562b74fa45f540aca068ea85fa34c244d0deee539cb6d70aa288"},
+ {file = "pandas-2.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8e90497254aacacbc4ea6ae5e7a8cd75629d6ad2b30025a4a8b09aa4faf55151"},
+ {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58b84b91b0b9f4bafac2a0ac55002280c094dfc6402402332c0913a59654ab2b"},
+ {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2123dc9ad6a814bcdea0f099885276b31b24f7edf40f6cdbc0912672e22eee"},
+ {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2925720037f06e89af896c70bca73459d7e6a4be96f9de79e2d440bd499fe0db"},
+ {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0cace394b6ea70c01ca1595f839cf193df35d1575986e484ad35c4aeae7266c1"},
+ {file = "pandas-2.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:873d13d177501a28b2756375d59816c365e42ed8417b41665f346289adc68d24"},
+ {file = "pandas-2.2.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9dfde2a0ddef507a631dc9dc4af6a9489d5e2e740e226ad426a05cabfbd7c8ef"},
+ {file = "pandas-2.2.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e9b79011ff7a0f4b1d6da6a61aa1aa604fb312d6647de5bad20013682d1429ce"},
+ {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cb51fe389360f3b5a4d57dbd2848a5f033350336ca3b340d1c53a1fad33bcad"},
+ {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eee3a87076c0756de40b05c5e9a6069c035ba43e8dd71c379e68cab2c20f16ad"},
+ {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3e374f59e440d4ab45ca2fffde54b81ac3834cf5ae2cdfa69c90bc03bde04d76"},
+ {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:43498c0bdb43d55cb162cdc8c06fac328ccb5d2eabe3cadeb3529ae6f0517c32"},
+ {file = "pandas-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:d187d355ecec3629624fccb01d104da7d7f391db0311145817525281e2804d23"},
+ {file = "pandas-2.2.2.tar.gz", hash = "sha256:9e79019aba43cb4fda9e4d983f8e88ca0373adbb697ae9c6c43093218de28b54"},
+]
+
+[[package]]
+name = "pathspec"
+version = "0.12.1"
+requires_python = ">=3.8"
+summary = "Utility library for gitignore style pattern matching of file paths."
+groups = ["dev"]
+files = [
+ {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"},
+ {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"},
+]
+
+[[package]]
+name = "platformdirs"
+version = "4.2.2"
+requires_python = ">=3.8"
+summary = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`."
+groups = ["default", "dev"]
+files = [
+ {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"},
+ {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"},
+]
+
+[[package]]
+name = "plotly"
+version = "5.23.0"
+requires_python = ">=3.8"
+summary = "An open-source, interactive data visualization library for Python"
+groups = ["default"]
+dependencies = [
+ "packaging",
+ "tenacity>=6.2.0",
+]
+files = [
+ {file = "plotly-5.23.0-py3-none-any.whl", hash = "sha256:76cbe78f75eddc10c56f5a4ee3e7ccaade7c0a57465546f02098c0caed6c2d1a"},
+ {file = "plotly-5.23.0.tar.gz", hash = "sha256:89e57d003a116303a34de6700862391367dd564222ab71f8531df70279fc0193"},
+]
+
+[[package]]
+name = "pluggy"
+version = "1.5.0"
+requires_python = ">=3.8"
+summary = "plugin and hook calling mechanisms for python"
+groups = ["dev"]
+files = [
+ {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"},
+ {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"},
+]
+
+[[package]]
+name = "polyfactory"
+version = "2.16.2"
+requires_python = "<4.0,>=3.8"
+summary = "Mock data generation factories"
+groups = ["default"]
+dependencies = [
+ "faker",
+ "typing-extensions>=4.6.0",
+]
+files = [
+ {file = "polyfactory-2.16.2-py3-none-any.whl", hash = "sha256:e5eaf97358fee07d0d8de86a93e81dc56e3be1e1514d145fea6c5f486cda6ea1"},
+ {file = "polyfactory-2.16.2.tar.gz", hash = "sha256:6d0d90deb85e5bb1733ea8744c2d44eea2b31656e11b4fa73832d2e2ab5422da"},
+]
+
+[[package]]
+name = "prometheus-client"
+version = "0.20.0"
+requires_python = ">=3.8"
+summary = "Python client for the Prometheus monitoring system."
+groups = ["default"]
+files = [
+ {file = "prometheus_client-0.20.0-py3-none-any.whl", hash = "sha256:cde524a85bce83ca359cc837f28b8c0db5cac7aa653a588fd7e84ba061c329e7"},
+ {file = "prometheus_client-0.20.0.tar.gz", hash = "sha256:287629d00b147a32dcb2be0b9df905da599b2d82f80377083ec8463309a4bb89"},
+]
+
+[[package]]
+name = "proto-plus"
+version = "1.24.0"
+requires_python = ">=3.7"
+summary = "Beautiful, Pythonic protocol buffers."
+groups = ["default"]
+marker = "python_version >= \"3.6\""
+dependencies = [
+ "protobuf<6.0.0dev,>=3.19.0",
+]
+files = [
+ {file = "proto-plus-1.24.0.tar.gz", hash = "sha256:30b72a5ecafe4406b0d339db35b56c4059064e69227b8c3bda7462397f966445"},
+ {file = "proto_plus-1.24.0-py3-none-any.whl", hash = "sha256:402576830425e5f6ce4c2a6702400ac79897dab0b4343821aa5188b0fab81a12"},
+]
+
+[[package]]
+name = "protobuf"
+version = "5.27.3"
+requires_python = ">=3.8"
+summary = ""
+groups = ["default"]
+files = [
+ {file = "protobuf-5.27.3-cp310-abi3-win32.whl", hash = "sha256:dcb307cd4ef8fec0cf52cb9105a03d06fbb5275ce6d84a6ae33bc6cf84e0a07b"},
+ {file = "protobuf-5.27.3-cp310-abi3-win_amd64.whl", hash = "sha256:16ddf3f8c6c41e1e803da7abea17b1793a97ef079a912e42351eabb19b2cffe7"},
+ {file = "protobuf-5.27.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:68248c60d53f6168f565a8c76dc58ba4fa2ade31c2d1ebdae6d80f969cdc2d4f"},
+ {file = "protobuf-5.27.3-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:b8a994fb3d1c11156e7d1e427186662b64694a62b55936b2b9348f0a7c6625ce"},
+ {file = "protobuf-5.27.3-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:a55c48f2a2092d8e213bd143474df33a6ae751b781dd1d1f4d953c128a415b25"},
+ {file = "protobuf-5.27.3-py3-none-any.whl", hash = "sha256:8572c6533e544ebf6899c360e91d6bcbbee2549251643d32c52cf8a5de295ba5"},
+ {file = "protobuf-5.27.3.tar.gz", hash = "sha256:82460903e640f2b7e34ee81a947fdaad89de796d324bcbc38ff5430bcdead82c"},
+]
+
+[[package]]
+name = "py-spy"
+version = "0.3.14"
+summary = "Sampling profiler for Python programs "
+groups = ["default"]
+files = [
+ {file = "py_spy-0.3.14-py2.py3-none-macosx_10_7_x86_64.whl", hash = "sha256:5b342cc5feb8d160d57a7ff308de153f6be68dcf506ad02b4d67065f2bae7f45"},
+ {file = "py_spy-0.3.14-py2.py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:fe7efe6c91f723442259d428bf1f9ddb9c1679828866b353d539345ca40d9dd2"},
+ {file = "py_spy-0.3.14-py2.py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:590905447241d789d9de36cff9f52067b6f18d8b5e9fb399242041568d414461"},
+ {file = "py_spy-0.3.14-py2.py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fd6211fe7f587b3532ba9d300784326d9a6f2b890af7bf6fff21a029ebbc812b"},
+ {file = "py_spy-0.3.14-py2.py3-none-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3e8e48032e71c94c3dd51694c39e762e4bbfec250df5bf514adcdd64e79371e0"},
+ {file = "py_spy-0.3.14-py2.py3-none-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:f59b0b52e56ba9566305236375e6fc68888261d0d36b5addbe3cf85affbefc0e"},
+ {file = "py_spy-0.3.14-py2.py3-none-win_amd64.whl", hash = "sha256:8f5b311d09f3a8e33dbd0d44fc6e37b715e8e0c7efefafcda8bfd63b31ab5a31"},
+]
+
+[[package]]
+name = "pyasn1"
+version = "0.6.0"
+requires_python = ">=3.8"
+summary = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)"
+groups = ["default"]
+marker = "python_version >= \"3.6\""
+files = [
+ {file = "pyasn1-0.6.0-py2.py3-none-any.whl", hash = "sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473"},
+ {file = "pyasn1-0.6.0.tar.gz", hash = "sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c"},
+]
+
+[[package]]
+name = "pyasn1-modules"
+version = "0.4.0"
+requires_python = ">=3.8"
+summary = "A collection of ASN.1-based protocols modules"
+groups = ["default"]
+marker = "python_version >= \"3.6\""
+dependencies = [
+ "pyasn1<0.7.0,>=0.4.6",
+]
+files = [
+ {file = "pyasn1_modules-0.4.0-py3-none-any.whl", hash = "sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b"},
+ {file = "pyasn1_modules-0.4.0.tar.gz", hash = "sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6"},
+]
+
+[[package]]
+name = "pycparser"
+version = "2.22"
+requires_python = ">=3.8"
+summary = "C parser in Python"
+groups = ["default"]
+files = [
+ {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"},
+ {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"},
+]
+
+[[package]]
+name = "pycryptodome"
+version = "3.20.0"
+requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+summary = "Cryptographic library for Python"
+groups = ["default"]
+files = [
+ {file = "pycryptodome-3.20.0-cp35-abi3-macosx_10_9_universal2.whl", hash = "sha256:ac1c7c0624a862f2e53438a15c9259d1655325fc2ec4392e66dc46cdae24d044"},
+ {file = "pycryptodome-3.20.0-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:76658f0d942051d12a9bd08ca1b6b34fd762a8ee4240984f7c06ddfb55eaf15a"},
+ {file = "pycryptodome-3.20.0-cp35-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f35d6cee81fa145333137009d9c8ba90951d7d77b67c79cbe5f03c7eb74d8fe2"},
+ {file = "pycryptodome-3.20.0-cp35-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76cb39afede7055127e35a444c1c041d2e8d2f1f9c121ecef573757ba4cd2c3c"},
+ {file = "pycryptodome-3.20.0-cp35-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49a4c4dc60b78ec41d2afa392491d788c2e06edf48580fbfb0dd0f828af49d25"},
+ {file = "pycryptodome-3.20.0-cp35-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fb3b87461fa35afa19c971b0a2b7456a7b1db7b4eba9a8424666104925b78128"},
+ {file = "pycryptodome-3.20.0-cp35-abi3-musllinux_1_1_i686.whl", hash = "sha256:acc2614e2e5346a4a4eab6e199203034924313626f9620b7b4b38e9ad74b7e0c"},
+ {file = "pycryptodome-3.20.0-cp35-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:210ba1b647837bfc42dd5a813cdecb5b86193ae11a3f5d972b9a0ae2c7e9e4b4"},
+ {file = "pycryptodome-3.20.0-cp35-abi3-win32.whl", hash = "sha256:8d6b98d0d83d21fb757a182d52940d028564efe8147baa9ce0f38d057104ae72"},
+ {file = "pycryptodome-3.20.0-cp35-abi3-win_amd64.whl", hash = "sha256:9b3ae153c89a480a0ec402e23db8d8d84a3833b65fa4b15b81b83be9d637aab9"},
+ {file = "pycryptodome-3.20.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:acae12b9ede49f38eb0ef76fdec2df2e94aad85ae46ec85be3648a57f0a7db04"},
+ {file = "pycryptodome-3.20.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f47888542a0633baff535a04726948e876bf1ed880fddb7c10a736fa99146ab3"},
+ {file = "pycryptodome-3.20.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e0e4a987d38cfc2e71b4a1b591bae4891eeabe5fa0f56154f576e26287bfdea"},
+ {file = "pycryptodome-3.20.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c18b381553638414b38705f07d1ef0a7cf301bc78a5f9bc17a957eb19446834b"},
+ {file = "pycryptodome-3.20.0.tar.gz", hash = "sha256:09609209ed7de61c2b560cc5c8c4fbf892f8b15b1faf7e4cbffac97db1fffda7"},
+]
+
+[[package]]
+name = "pydantic"
+version = "2.9.1"
+requires_python = ">=3.8"
+summary = "Data validation using Python type hints"
+groups = ["default"]
+dependencies = [
+ "annotated-types>=0.6.0",
+ "pydantic-core==2.23.3",
+ "typing-extensions>=4.12.2; python_version >= \"3.13\"",
+ "typing-extensions>=4.6.1; python_version < \"3.13\"",
+]
+files = [
+ {file = "pydantic-2.9.1-py3-none-any.whl", hash = "sha256:7aff4db5fdf3cf573d4b3c30926a510a10e19a0774d38fc4967f78beb6deb612"},
+ {file = "pydantic-2.9.1.tar.gz", hash = "sha256:1363c7d975c7036df0db2b4a61f2e062fbc0aa5ab5f2772e0ffc7191a4f4bce2"},
+]
+
+[[package]]
+name = "pydantic-core"
+version = "2.23.3"
+requires_python = ">=3.8"
+summary = "Core functionality for Pydantic validation and serialization"
+groups = ["default"]
+dependencies = [
+ "typing-extensions!=4.7.0,>=4.6.0",
+]
+files = [
+ {file = "pydantic_core-2.23.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:7f10a5d1b9281392f1bf507d16ac720e78285dfd635b05737c3911637601bae6"},
+ {file = "pydantic_core-2.23.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3c09a7885dd33ee8c65266e5aa7fb7e2f23d49d8043f089989726391dd7350c5"},
+ {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6470b5a1ec4d1c2e9afe928c6cb37eb33381cab99292a708b8cb9aa89e62429b"},
+ {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9172d2088e27d9a185ea0a6c8cebe227a9139fd90295221d7d495944d2367700"},
+ {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86fc6c762ca7ac8fbbdff80d61b2c59fb6b7d144aa46e2d54d9e1b7b0e780e01"},
+ {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0cb80fd5c2df4898693aa841425ea1727b1b6d2167448253077d2a49003e0ed"},
+ {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03667cec5daf43ac4995cefa8aaf58f99de036204a37b889c24a80927b629cec"},
+ {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:047531242f8e9c2db733599f1c612925de095e93c9cc0e599e96cf536aaf56ba"},
+ {file = "pydantic_core-2.23.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5499798317fff7f25dbef9347f4451b91ac2a4330c6669821c8202fd354c7bee"},
+ {file = "pydantic_core-2.23.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bbb5e45eab7624440516ee3722a3044b83fff4c0372efe183fd6ba678ff681fe"},
+ {file = "pydantic_core-2.23.3-cp310-none-win32.whl", hash = "sha256:8b5b3ed73abb147704a6e9f556d8c5cb078f8c095be4588e669d315e0d11893b"},
+ {file = "pydantic_core-2.23.3-cp310-none-win_amd64.whl", hash = "sha256:2b603cde285322758a0279995b5796d64b63060bfbe214b50a3ca23b5cee3e83"},
+ {file = "pydantic_core-2.23.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:c889fd87e1f1bbeb877c2ee56b63bb297de4636661cc9bbfcf4b34e5e925bc27"},
+ {file = "pydantic_core-2.23.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea85bda3189fb27503af4c45273735bcde3dd31c1ab17d11f37b04877859ef45"},
+ {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a7f7f72f721223f33d3dc98a791666ebc6a91fa023ce63733709f4894a7dc611"},
+ {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b2b55b0448e9da68f56b696f313949cda1039e8ec7b5d294285335b53104b61"},
+ {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c24574c7e92e2c56379706b9a3f07c1e0c7f2f87a41b6ee86653100c4ce343e5"},
+ {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f2b05e6ccbee333a8f4b8f4d7c244fdb7a979e90977ad9c51ea31261e2085ce0"},
+ {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2c409ce1c219c091e47cb03feb3c4ed8c2b8e004efc940da0166aaee8f9d6c8"},
+ {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d965e8b325f443ed3196db890d85dfebbb09f7384486a77461347f4adb1fa7f8"},
+ {file = "pydantic_core-2.23.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f56af3a420fb1ffaf43ece3ea09c2d27c444e7c40dcb7c6e7cf57aae764f2b48"},
+ {file = "pydantic_core-2.23.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5b01a078dd4f9a52494370af21aa52964e0a96d4862ac64ff7cea06e0f12d2c5"},
+ {file = "pydantic_core-2.23.3-cp311-none-win32.whl", hash = "sha256:560e32f0df04ac69b3dd818f71339983f6d1f70eb99d4d1f8e9705fb6c34a5c1"},
+ {file = "pydantic_core-2.23.3-cp311-none-win_amd64.whl", hash = "sha256:c744fa100fdea0d000d8bcddee95213d2de2e95b9c12be083370b2072333a0fa"},
+ {file = "pydantic_core-2.23.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:e0ec50663feedf64d21bad0809f5857bac1ce91deded203efc4a84b31b2e4305"},
+ {file = "pydantic_core-2.23.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:db6e6afcb95edbe6b357786684b71008499836e91f2a4a1e55b840955b341dbb"},
+ {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98ccd69edcf49f0875d86942f4418a4e83eb3047f20eb897bffa62a5d419c8fa"},
+ {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a678c1ac5c5ec5685af0133262103defb427114e62eafeda12f1357a12140162"},
+ {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:01491d8b4d8db9f3391d93b0df60701e644ff0894352947f31fff3e52bd5c801"},
+ {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fcf31facf2796a2d3b7fe338fe8640aa0166e4e55b4cb108dbfd1058049bf4cb"},
+ {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7200fd561fb3be06827340da066df4311d0b6b8eb0c2116a110be5245dceb326"},
+ {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dc1636770a809dee2bd44dd74b89cc80eb41172bcad8af75dd0bc182c2666d4c"},
+ {file = "pydantic_core-2.23.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:67a5def279309f2e23014b608c4150b0c2d323bd7bccd27ff07b001c12c2415c"},
+ {file = "pydantic_core-2.23.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:748bdf985014c6dd3e1e4cc3db90f1c3ecc7246ff5a3cd4ddab20c768b2f1dab"},
+ {file = "pydantic_core-2.23.3-cp312-none-win32.whl", hash = "sha256:255ec6dcb899c115f1e2a64bc9ebc24cc0e3ab097775755244f77360d1f3c06c"},
+ {file = "pydantic_core-2.23.3-cp312-none-win_amd64.whl", hash = "sha256:40b8441be16c1e940abebed83cd006ddb9e3737a279e339dbd6d31578b802f7b"},
+ {file = "pydantic_core-2.23.3-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:6daaf5b1ba1369a22c8b050b643250e3e5efc6a78366d323294aee54953a4d5f"},
+ {file = "pydantic_core-2.23.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d015e63b985a78a3d4ccffd3bdf22b7c20b3bbd4b8227809b3e8e75bc37f9cb2"},
+ {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3fc572d9b5b5cfe13f8e8a6e26271d5d13f80173724b738557a8c7f3a8a3791"},
+ {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f6bd91345b5163ee7448bee201ed7dd601ca24f43f439109b0212e296eb5b423"},
+ {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fc379c73fd66606628b866f661e8785088afe2adaba78e6bbe80796baf708a63"},
+ {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbdce4b47592f9e296e19ac31667daed8753c8367ebb34b9a9bd89dacaa299c9"},
+ {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc3cf31edf405a161a0adad83246568647c54404739b614b1ff43dad2b02e6d5"},
+ {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8e22b477bf90db71c156f89a55bfe4d25177b81fce4aa09294d9e805eec13855"},
+ {file = "pydantic_core-2.23.3-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:0a0137ddf462575d9bce863c4c95bac3493ba8e22f8c28ca94634b4a1d3e2bb4"},
+ {file = "pydantic_core-2.23.3-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:203171e48946c3164fe7691fc349c79241ff8f28306abd4cad5f4f75ed80bc8d"},
+ {file = "pydantic_core-2.23.3-cp313-none-win32.whl", hash = "sha256:76bdab0de4acb3f119c2a4bff740e0c7dc2e6de7692774620f7452ce11ca76c8"},
+ {file = "pydantic_core-2.23.3-cp313-none-win_amd64.whl", hash = "sha256:37ba321ac2a46100c578a92e9a6aa33afe9ec99ffa084424291d84e456f490c1"},
+ {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f399e8657c67313476a121a6944311fab377085ca7f490648c9af97fc732732d"},
+ {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:6b5547d098c76e1694ba85f05b595720d7c60d342f24d5aad32c3049131fa5c4"},
+ {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0dda0290a6f608504882d9f7650975b4651ff91c85673341789a476b1159f211"},
+ {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65b6e5da855e9c55a0c67f4db8a492bf13d8d3316a59999cfbaf98cc6e401961"},
+ {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:09e926397f392059ce0afdcac920df29d9c833256354d0c55f1584b0b70cf07e"},
+ {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:87cfa0ed6b8c5bd6ae8b66de941cece179281239d482f363814d2b986b79cedc"},
+ {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e61328920154b6a44d98cabcb709f10e8b74276bc709c9a513a8c37a18786cc4"},
+ {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ce3317d155628301d649fe5e16a99528d5680af4ec7aa70b90b8dacd2d725c9b"},
+ {file = "pydantic_core-2.23.3.tar.gz", hash = "sha256:3cb0f65d8b4121c1b015c60104a685feb929a29d7cf204387c7f2688c7974690"},
+]
+
+[[package]]
+name = "pydata-sphinx-theme"
+version = "0.15.4"
+requires_python = ">=3.9"
+summary = "Bootstrap-based Sphinx theme from the PyData community"
+groups = ["docs"]
+dependencies = [
+ "Babel",
+ "accessible-pygments",
+ "beautifulsoup4",
+ "docutils!=0.17.0",
+ "packaging",
+ "pygments>=2.7",
+ "sphinx>=5",
+ "typing-extensions",
+]
+files = [
+ {file = "pydata_sphinx_theme-0.15.4-py3-none-any.whl", hash = "sha256:2136ad0e9500d0949f96167e63f3e298620040aea8f9c74621959eda5d4cf8e6"},
+ {file = "pydata_sphinx_theme-0.15.4.tar.gz", hash = "sha256:7762ec0ac59df3acecf49fd2f889e1b4565dbce8b88b2e29ee06fdd90645a06d"},
+]
+
+[[package]]
+name = "pygments"
+version = "2.18.0"
+requires_python = ">=3.8"
+summary = "Pygments is a syntax highlighting package written in Python."
+groups = ["default", "docs"]
+files = [
+ {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"},
+ {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"},
+]
+
+[[package]]
+name = "pymongo"
+version = "4.8.0"
+requires_python = ">=3.8"
+summary = "Python driver for MongoDB "
+groups = ["default"]
+dependencies = [
+ "dnspython<3.0.0,>=1.16.0",
+]
+files = [
+ {file = "pymongo-4.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f2b7bec27e047e84947fbd41c782f07c54c30c76d14f3b8bf0c89f7413fac67a"},
+ {file = "pymongo-4.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3c68fe128a171493018ca5c8020fc08675be130d012b7ab3efe9e22698c612a1"},
+ {file = "pymongo-4.8.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:920d4f8f157a71b3cb3f39bc09ce070693d6e9648fb0e30d00e2657d1dca4e49"},
+ {file = "pymongo-4.8.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52b4108ac9469febba18cea50db972605cc43978bedaa9fea413378877560ef8"},
+ {file = "pymongo-4.8.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:180d5eb1dc28b62853e2f88017775c4500b07548ed28c0bd9c005c3d7bc52526"},
+ {file = "pymongo-4.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aec2b9088cdbceb87e6ca9c639d0ff9b9d083594dda5ca5d3c4f6774f4c81b33"},
+ {file = "pymongo-4.8.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d0cf61450feadca81deb1a1489cb1a3ae1e4266efd51adafecec0e503a8dcd84"},
+ {file = "pymongo-4.8.0-cp310-cp310-win32.whl", hash = "sha256:8b18c8324809539c79bd6544d00e0607e98ff833ca21953df001510ca25915d1"},
+ {file = "pymongo-4.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:e5df28f74002e37bcbdfdc5109799f670e4dfef0fb527c391ff84f078050e7b5"},
+ {file = "pymongo-4.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6b50040d9767197b77ed420ada29b3bf18a638f9552d80f2da817b7c4a4c9c68"},
+ {file = "pymongo-4.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:417369ce39af2b7c2a9c7152c1ed2393edfd1cbaf2a356ba31eb8bcbd5c98dd7"},
+ {file = "pymongo-4.8.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf821bd3befb993a6db17229a2c60c1550e957de02a6ff4dd0af9476637b2e4d"},
+ {file = "pymongo-4.8.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9365166aa801c63dff1a3cb96e650be270da06e3464ab106727223123405510f"},
+ {file = "pymongo-4.8.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc8b8582f4209c2459b04b049ac03c72c618e011d3caa5391ff86d1bda0cc486"},
+ {file = "pymongo-4.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16e5019f75f6827bb5354b6fef8dfc9d6c7446894a27346e03134d290eb9e758"},
+ {file = "pymongo-4.8.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b5802151fc2b51cd45492c80ed22b441d20090fb76d1fd53cd7760b340ff554"},
+ {file = "pymongo-4.8.0-cp311-cp311-win32.whl", hash = "sha256:4bf58e6825b93da63e499d1a58de7de563c31e575908d4e24876234ccb910eba"},
+ {file = "pymongo-4.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:b747c0e257b9d3e6495a018309b9e0c93b7f0d65271d1d62e572747f4ffafc88"},
+ {file = "pymongo-4.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e6a720a3d22b54183352dc65f08cd1547204d263e0651b213a0a2e577e838526"},
+ {file = "pymongo-4.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:31e4d21201bdf15064cf47ce7b74722d3e1aea2597c6785882244a3bb58c7eab"},
+ {file = "pymongo-4.8.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6b804bb4f2d9dc389cc9e827d579fa327272cdb0629a99bfe5b83cb3e269ebf"},
+ {file = "pymongo-4.8.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f2fbdb87fe5075c8beb17a5c16348a1ea3c8b282a5cb72d173330be2fecf22f5"},
+ {file = "pymongo-4.8.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd39455b7ee70aabee46f7399b32ab38b86b236c069ae559e22be6b46b2bbfc4"},
+ {file = "pymongo-4.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:940d456774b17814bac5ea7fc28188c7a1338d4a233efbb6ba01de957bded2e8"},
+ {file = "pymongo-4.8.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:236bbd7d0aef62e64caf4b24ca200f8c8670d1a6f5ea828c39eccdae423bc2b2"},
+ {file = "pymongo-4.8.0-cp312-cp312-win32.whl", hash = "sha256:47ec8c3f0a7b2212dbc9be08d3bf17bc89abd211901093e3ef3f2adea7de7a69"},
+ {file = "pymongo-4.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:e84bc7707492f06fbc37a9f215374d2977d21b72e10a67f1b31893ec5a140ad8"},
+ {file = "pymongo-4.8.0.tar.gz", hash = "sha256:454f2295875744dc70f1881e4b2eb99cdad008a33574bc8aaf120530f66c0cde"},
+]
+
+[[package]]
+name = "pyro-api"
+version = "0.1.2"
+summary = "Generic API for dispatch to Pyro backends."
+groups = ["default"]
+files = [
+ {file = "pyro-api-0.1.2.tar.gz", hash = "sha256:a1b900d9580aa1c2fab3b123ab7ff33413744da7c5f440bd4aadc4d40d14d920"},
+ {file = "pyro_api-0.1.2-py3-none-any.whl", hash = "sha256:10e0e42e9e4401ce464dab79c870e50dfb4f413d326fa777f3582928ef9caf8f"},
+]
+
+[[package]]
+name = "pyro-ppl"
+version = "1.9.1"
+requires_python = ">=3.8"
+summary = "A Python library for probabilistic modeling and inference"
+groups = ["default"]
+dependencies = [
+ "numpy>=1.7",
+ "opt-einsum>=2.3.2",
+ "pyro-api>=0.1.1",
+ "torch>=2.0",
+ "tqdm>=4.36",
+]
+files = [
+ {file = "pyro_ppl-1.9.1-py3-none-any.whl", hash = "sha256:91fb2c8740d9d3bd548180ac5ecfa04552ed8c471a1ab66870180663b8f09852"},
+ {file = "pyro_ppl-1.9.1.tar.gz", hash = "sha256:5e1596de276c038a3f77d2580a90d0a97126e0104900444a088eee620bb0d65e"},
+]
+
+[[package]]
+name = "pytest"
+version = "8.3.2"
+requires_python = ">=3.8"
+summary = "pytest: simple powerful testing with Python"
+groups = ["dev"]
+dependencies = [
+ "colorama; sys_platform == \"win32\"",
+ "exceptiongroup>=1.0.0rc8; python_version < \"3.11\"",
+ "iniconfig",
+ "packaging",
+ "pluggy<2,>=1.5",
+ "tomli>=1; python_version < \"3.11\"",
+]
+files = [
+ {file = "pytest-8.3.2-py3-none-any.whl", hash = "sha256:4ba08f9ae7dcf84ded419494d229b48d0903ea6407b030eaec46df5e6a73bba5"},
+ {file = "pytest-8.3.2.tar.gz", hash = "sha256:c132345d12ce551242c87269de812483f5bcc87cdbb4722e48487ba194f9fdce"},
+]
+
+[[package]]
+name = "pytest-asyncio"
+version = "0.23.8"
+requires_python = ">=3.8"
+summary = "Pytest support for asyncio"
+groups = ["dev"]
+dependencies = [
+ "pytest<9,>=7.0.0",
+]
+files = [
+ {file = "pytest_asyncio-0.23.8-py3-none-any.whl", hash = "sha256:50265d892689a5faefb84df80819d1ecef566eb3549cf915dfb33569359d1ce2"},
+ {file = "pytest_asyncio-0.23.8.tar.gz", hash = "sha256:759b10b33a6dc61cce40a8bd5205e302978bbbcc00e279a8b61d9a6a3c82e4d3"},
+]
+
+[[package]]
+name = "pytest-cov"
+version = "5.0.0"
+requires_python = ">=3.8"
+summary = "Pytest plugin for measuring coverage."
+groups = ["dev"]
+dependencies = [
+ "coverage[toml]>=5.2.1",
+ "pytest>=4.6",
+]
+files = [
+ {file = "pytest-cov-5.0.0.tar.gz", hash = "sha256:5837b58e9f6ebd335b0f8060eecce69b662415b16dc503883a02f45dfeb14857"},
+ {file = "pytest_cov-5.0.0-py3-none-any.whl", hash = "sha256:4f0764a1219df53214206bf1feea4633c3b558a2925c8b59f144f682861ce652"},
+]
+
+[[package]]
+name = "python-dateutil"
+version = "2.9.0.post0"
+requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
+summary = "Extensions to the standard Python datetime module"
+groups = ["default"]
+dependencies = [
+ "six>=1.5",
+]
+files = [
+ {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"},
+ {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"},
+]
+
+[[package]]
+name = "python-dotenv"
+version = "1.0.1"
+requires_python = ">=3.8"
+summary = "Read key-value pairs from a .env file and set them as environment variables"
+groups = ["default"]
+files = [
+ {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"},
+ {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"},
+]
+
+[[package]]
+name = "pytz"
+version = "2024.1"
+summary = "World timezone definitions, modern and historical"
+groups = ["default"]
+files = [
+ {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"},
+ {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"},
+]
+
+[[package]]
+name = "pyyaml"
+version = "6.0.2"
+requires_python = ">=3.8"
+summary = "YAML parser and emitter for Python"
+groups = ["default"]
+files = [
+ {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"},
+ {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"},
+ {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"},
+ {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"},
+ {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"},
+ {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"},
+ {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"},
+ {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"},
+ {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"},
+ {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"},
+ {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"},
+ {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"},
+ {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"},
+ {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"},
+ {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"},
+ {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"},
+ {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"},
+ {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"},
+ {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"},
+ {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"},
+ {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"},
+ {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"},
+ {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"},
+ {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"},
+ {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"},
+ {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"},
+ {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"},
+ {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"},
+ {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"},
+ {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"},
+ {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"},
+ {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"},
+ {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"},
+ {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"},
+ {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"},
+ {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"},
+ {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"},
+]
+
+[[package]]
+name = "qdldl"
+version = "0.1.7.post4"
+summary = "QDLDL, a free LDL factorization routine."
+groups = ["default"]
+dependencies = [
+ "numpy>=1.7",
+ "scipy>=0.13.2",
+]
+files = [
+ {file = "qdldl-0.1.7.post4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ff4a9c5f7fa96e222c767aaaabea9d5df1d099e172c14b322b98d54dac03705d"},
+ {file = "qdldl-0.1.7.post4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b53ad4ecd90c8031e0094fbab0b0bf09520b382177db63ec9568f06b4f16c219"},
+ {file = "qdldl-0.1.7.post4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:490b52049c4cd794cb9bb2a8b26d69e74bbb71e55b5f0cac1480de971970d79c"},
+ {file = "qdldl-0.1.7.post4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ebf39433b467d2b33872e96fd05ed4a74d701eb94cd14cb010d5980fbdc02954"},
+ {file = "qdldl-0.1.7.post4-cp310-cp310-win_amd64.whl", hash = "sha256:5227ace6741618aa9aa2b0162740e806040f3a69e88204911e74b5d220d5bfce"},
+ {file = "qdldl-0.1.7.post4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f6639d63c3bf9abbfdffafd3c99b7c603359ca748ab62117ec7fc0948a1c5e77"},
+ {file = "qdldl-0.1.7.post4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:87f31e7f2a2708def201b6dc507a48ada7e0c37efd0afda7ef6ef94ae3487c2c"},
+ {file = "qdldl-0.1.7.post4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47c1b27712444d7b1030c562ed79af18320b4a910454716c9d88114e181eddec"},
+ {file = "qdldl-0.1.7.post4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1496a820ffb0c1a5bb18392b44052b83b5442745b15f62bbf2d22eec1f506afe"},
+ {file = "qdldl-0.1.7.post4-cp311-cp311-win_amd64.whl", hash = "sha256:b6f8d59c01fa5c9dc3b6463fc7e1de7601dcb1aa16b6e14a6d5d283169dc629f"},
+ {file = "qdldl-0.1.7.post4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e2b9e92bb52d3bc49cfc9fd9a761adb692f049c46e68c0535ed07df2de8292f5"},
+ {file = "qdldl-0.1.7.post4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f6710b0c1013292697262803ddd549a81cdfdbdbbbcfa5b56aad04ac9cebbb4a"},
+ {file = "qdldl-0.1.7.post4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a40429f5c0d0edb28d22c4e52c2459fd9a64892ba7d8a39ba51a1a37b3581927"},
+ {file = "qdldl-0.1.7.post4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b85beb51096100dcdea575acedbafb5bac2b7f44485a1d7090bb68a47c8f9928"},
+ {file = "qdldl-0.1.7.post4-cp312-cp312-win_amd64.whl", hash = "sha256:684306b37a2f06f72c18edd2d6fa45a832e99071ebd87b875d172719e09a322d"},
+ {file = "qdldl-0.1.7.post4.tar.gz", hash = "sha256:0c163b9afb92c4b69d446387b1d4295094438b041ec4e8510271b6c4ff1f86fd"},
+]
+
+[[package]]
+name = "ray"
+version = "2.35.0"
+requires_python = ">=3.8"
+summary = "Ray provides a simple, universal API for building distributed applications."
+groups = ["default"]
+dependencies = [
+ "aiosignal",
+ "click>=7.0",
+ "filelock",
+ "frozenlist",
+ "jsonschema",
+ "msgpack<2.0.0,>=1.0.0",
+ "packaging",
+ "protobuf!=3.19.5,>=3.15.3",
+ "pyyaml",
+ "requests",
+]
+files = [
+ {file = "ray-2.35.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:1e7e2d2e987be728a81821b6fd2bccb23e4d8a6cca8417db08b24f06a08d8476"},
+ {file = "ray-2.35.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8bd48be4c362004d31e5df072fd58b929efc67adfefc0adece41483b15f84539"},
+ {file = "ray-2.35.0-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:ef41e9254f3e18a90a8cf13fac9e35ac086eb778079ab6c76a37d3a6059186c5"},
+ {file = "ray-2.35.0-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:1994aaf9996ffc45019856545e817d527ad572762f1af76ad669ae4e786fcfd6"},
+ {file = "ray-2.35.0-cp310-cp310-win_amd64.whl", hash = "sha256:d3b7a7d73f818e249064460ffa95402ebd852bf97d9ec6167b8b0d95be03da9f"},
+ {file = "ray-2.35.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:e29754fac4b69a9cb0d089841af59ec6fb10b5d4a248b7c579d319ca2ed1c96f"},
+ {file = "ray-2.35.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d7a606c8ca53c64fc496703e9fd15d1a1ffb50e6b457a33d3622be2f13fc30a5"},
+ {file = "ray-2.35.0-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:ac561e20a62ce941b74d02a0b92b7765c6ba87cc22e24f34f64ded2c454ba64e"},
+ {file = "ray-2.35.0-cp311-cp311-manylinux2014_x86_64.whl", hash = "sha256:587af570cbe5f6cedca854f15107740e63c67207bee900713cb2ee38f6ebf20f"},
+ {file = "ray-2.35.0-cp311-cp311-win_amd64.whl", hash = "sha256:8e406cce41679790146d4d2b1b0cb0b413ca35276e43b68ee796366169c1dbde"},
+ {file = "ray-2.35.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:eb86355a3a0e794e2f1dbd5a84805dddfca64921ad0999b7fa5276e40d243692"},
+ {file = "ray-2.35.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7b746913268d5ea5e19bff0eb6bdc7e0538036892a8b57c08411787481195df2"},
+ {file = "ray-2.35.0-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:e2ccfd144180f03d38b02a81afdac2b437f27e46736bf2653a1f0e8d67ea56cd"},
+ {file = "ray-2.35.0-cp312-cp312-manylinux2014_x86_64.whl", hash = "sha256:2ca1a0de41d4462fd764598a5981cf55fc955599f38f9a1ae10868e94c6dd80d"},
+ {file = "ray-2.35.0-cp312-cp312-win_amd64.whl", hash = "sha256:c5600f745bb0e4df840a5cd51e82b1acf517f73505df9869fe3e369966956129"},
+]
+
+[[package]]
+name = "ray"
+version = "2.35.0"
+extras = ["default"]
+requires_python = ">=3.8"
+summary = "Ray provides a simple, universal API for building distributed applications."
+groups = ["default"]
+dependencies = [
+ "aiohttp-cors",
+ "aiohttp>=3.7",
+ "colorful",
+ "grpcio>=1.32.0; python_version < \"3.10\"",
+ "grpcio>=1.42.0; python_version >= \"3.10\"",
+ "memray; sys_platform != \"win32\"",
+ "opencensus",
+ "prometheus-client>=0.7.1",
+ "py-spy>=0.2.0",
+ "pydantic!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,<3",
+ "ray==2.35.0",
+ "requests",
+ "smart-open",
+ "virtualenv!=20.21.1,>=20.0.24",
+]
+files = [
+ {file = "ray-2.35.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:1e7e2d2e987be728a81821b6fd2bccb23e4d8a6cca8417db08b24f06a08d8476"},
+ {file = "ray-2.35.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8bd48be4c362004d31e5df072fd58b929efc67adfefc0adece41483b15f84539"},
+ {file = "ray-2.35.0-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:ef41e9254f3e18a90a8cf13fac9e35ac086eb778079ab6c76a37d3a6059186c5"},
+ {file = "ray-2.35.0-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:1994aaf9996ffc45019856545e817d527ad572762f1af76ad669ae4e786fcfd6"},
+ {file = "ray-2.35.0-cp310-cp310-win_amd64.whl", hash = "sha256:d3b7a7d73f818e249064460ffa95402ebd852bf97d9ec6167b8b0d95be03da9f"},
+ {file = "ray-2.35.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:e29754fac4b69a9cb0d089841af59ec6fb10b5d4a248b7c579d319ca2ed1c96f"},
+ {file = "ray-2.35.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d7a606c8ca53c64fc496703e9fd15d1a1ffb50e6b457a33d3622be2f13fc30a5"},
+ {file = "ray-2.35.0-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:ac561e20a62ce941b74d02a0b92b7765c6ba87cc22e24f34f64ded2c454ba64e"},
+ {file = "ray-2.35.0-cp311-cp311-manylinux2014_x86_64.whl", hash = "sha256:587af570cbe5f6cedca854f15107740e63c67207bee900713cb2ee38f6ebf20f"},
+ {file = "ray-2.35.0-cp311-cp311-win_amd64.whl", hash = "sha256:8e406cce41679790146d4d2b1b0cb0b413ca35276e43b68ee796366169c1dbde"},
+ {file = "ray-2.35.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:eb86355a3a0e794e2f1dbd5a84805dddfca64921ad0999b7fa5276e40d243692"},
+ {file = "ray-2.35.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7b746913268d5ea5e19bff0eb6bdc7e0538036892a8b57c08411787481195df2"},
+ {file = "ray-2.35.0-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:e2ccfd144180f03d38b02a81afdac2b437f27e46736bf2653a1f0e8d67ea56cd"},
+ {file = "ray-2.35.0-cp312-cp312-manylinux2014_x86_64.whl", hash = "sha256:2ca1a0de41d4462fd764598a5981cf55fc955599f38f9a1ae10868e94c6dd80d"},
+ {file = "ray-2.35.0-cp312-cp312-win_amd64.whl", hash = "sha256:c5600f745bb0e4df840a5cd51e82b1acf517f73505df9869fe3e369966956129"},
+]
+
+[[package]]
+name = "referencing"
+version = "0.35.1"
+requires_python = ">=3.8"
+summary = "JSON Referencing + Python"
+groups = ["default"]
+dependencies = [
+ "attrs>=22.2.0",
+ "rpds-py>=0.7.0",
+]
+files = [
+ {file = "referencing-0.35.1-py3-none-any.whl", hash = "sha256:eda6d3234d62814d1c64e305c1331c9a3a6132da475ab6382eaa997b21ee75de"},
+ {file = "referencing-0.35.1.tar.gz", hash = "sha256:25b42124a6c8b632a425174f24087783efb348a6f1e0008e63cd4466fedf703c"},
+]
+
+[[package]]
+name = "requests"
+version = "2.32.3"
+requires_python = ">=3.8"
+summary = "Python HTTP for Humans."
+groups = ["default", "docs"]
+dependencies = [
+ "certifi>=2017.4.17",
+ "charset-normalizer<4,>=2",
+ "idna<4,>=2.5",
+ "urllib3<3,>=1.21.1",
+]
+files = [
+ {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"},
+ {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"},
+]
+
+[[package]]
+name = "rich"
+version = "13.8.1"
+requires_python = ">=3.7.0"
+summary = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal"
+groups = ["default"]
+dependencies = [
+ "markdown-it-py>=2.2.0",
+ "pygments<3.0.0,>=2.13.0",
+ "typing-extensions<5.0,>=4.0.0; python_version < \"3.9\"",
+]
+files = [
+ {file = "rich-13.8.1-py3-none-any.whl", hash = "sha256:1760a3c0848469b97b558fc61c85233e3dafb69c7a071b4d60c38099d3cd4c06"},
+ {file = "rich-13.8.1.tar.gz", hash = "sha256:8260cda28e3db6bf04d2d1ef4dbc03ba80a824c88b0e7668a0f23126a424844a"},
+]
+
+[[package]]
+name = "rich-click"
+version = "1.8.3"
+requires_python = ">=3.7"
+summary = "Format click help output nicely with rich"
+groups = ["default"]
+dependencies = [
+ "click>=7",
+ "importlib-metadata; python_version < \"3.8\"",
+ "rich>=10.7",
+ "typing-extensions",
+]
+files = [
+ {file = "rich_click-1.8.3-py3-none-any.whl", hash = "sha256:636d9c040d31c5eee242201b5bf4f2d358bfae4db14bb22ec1cafa717cfd02cd"},
+ {file = "rich_click-1.8.3.tar.gz", hash = "sha256:6d75bdfa7aa9ed2c467789a0688bc6da23fbe3a143e19aa6ad3f8bac113d2ab3"},
+]
+
+[[package]]
+name = "rpds-py"
+version = "0.20.0"
+requires_python = ">=3.8"
+summary = "Python bindings to Rust's persistent data structures (rpds)"
+groups = ["default"]
+files = [
+ {file = "rpds_py-0.20.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3ad0fda1635f8439cde85c700f964b23ed5fc2d28016b32b9ee5fe30da5c84e2"},
+ {file = "rpds_py-0.20.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9bb4a0d90fdb03437c109a17eade42dfbf6190408f29b2744114d11586611d6f"},
+ {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6377e647bbfd0a0b159fe557f2c6c602c159fc752fa316572f012fc0bf67150"},
+ {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb851b7df9dda52dc1415ebee12362047ce771fc36914586b2e9fcbd7d293b3e"},
+ {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e0f80b739e5a8f54837be5d5c924483996b603d5502bfff79bf33da06164ee2"},
+ {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a8c94dad2e45324fc74dce25e1645d4d14df9a4e54a30fa0ae8bad9a63928e3"},
+ {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8e604fe73ba048c06085beaf51147eaec7df856824bfe7b98657cf436623daf"},
+ {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:df3de6b7726b52966edf29663e57306b23ef775faf0ac01a3e9f4012a24a4140"},
+ {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cf258ede5bc22a45c8e726b29835b9303c285ab46fc7c3a4cc770736b5304c9f"},
+ {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:55fea87029cded5df854ca7e192ec7bdb7ecd1d9a3f63d5c4eb09148acf4a7ce"},
+ {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ae94bd0b2f02c28e199e9bc51485d0c5601f58780636185660f86bf80c89af94"},
+ {file = "rpds_py-0.20.0-cp310-none-win32.whl", hash = "sha256:28527c685f237c05445efec62426d285e47a58fb05ba0090a4340b73ecda6dee"},
+ {file = "rpds_py-0.20.0-cp310-none-win_amd64.whl", hash = "sha256:238a2d5b1cad28cdc6ed15faf93a998336eb041c4e440dd7f902528b8891b399"},
+ {file = "rpds_py-0.20.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ac2f4f7a98934c2ed6505aead07b979e6f999389f16b714448fb39bbaa86a489"},
+ {file = "rpds_py-0.20.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:220002c1b846db9afd83371d08d239fdc865e8f8c5795bbaec20916a76db3318"},
+ {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d7919548df3f25374a1f5d01fbcd38dacab338ef5f33e044744b5c36729c8db"},
+ {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:758406267907b3781beee0f0edfe4a179fbd97c0be2e9b1154d7f0a1279cf8e5"},
+ {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3d61339e9f84a3f0767b1995adfb171a0d00a1185192718a17af6e124728e0f5"},
+ {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1259c7b3705ac0a0bd38197565a5d603218591d3f6cee6e614e380b6ba61c6f6"},
+ {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c1dc0f53856b9cc9a0ccca0a7cc61d3d20a7088201c0937f3f4048c1718a209"},
+ {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7e60cb630f674a31f0368ed32b2a6b4331b8350d67de53c0359992444b116dd3"},
+ {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:dbe982f38565bb50cb7fb061ebf762c2f254ca3d8c20d4006878766e84266272"},
+ {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:514b3293b64187172bc77c8fb0cdae26981618021053b30d8371c3a902d4d5ad"},
+ {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d0a26ffe9d4dd35e4dfdd1e71f46401cff0181c75ac174711ccff0459135fa58"},
+ {file = "rpds_py-0.20.0-cp311-none-win32.whl", hash = "sha256:89c19a494bf3ad08c1da49445cc5d13d8fefc265f48ee7e7556839acdacf69d0"},
+ {file = "rpds_py-0.20.0-cp311-none-win_amd64.whl", hash = "sha256:c638144ce971df84650d3ed0096e2ae7af8e62ecbbb7b201c8935c370df00a2c"},
+ {file = "rpds_py-0.20.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a84ab91cbe7aab97f7446652d0ed37d35b68a465aeef8fc41932a9d7eee2c1a6"},
+ {file = "rpds_py-0.20.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:56e27147a5a4c2c21633ff8475d185734c0e4befd1c989b5b95a5d0db699b21b"},
+ {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2580b0c34583b85efec8c5c5ec9edf2dfe817330cc882ee972ae650e7b5ef739"},
+ {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b80d4a7900cf6b66bb9cee5c352b2d708e29e5a37fe9bf784fa97fc11504bf6c"},
+ {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:50eccbf054e62a7b2209b28dc7a22d6254860209d6753e6b78cfaeb0075d7bee"},
+ {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:49a8063ea4296b3a7e81a5dfb8f7b2d73f0b1c20c2af401fb0cdf22e14711a96"},
+ {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea438162a9fcbee3ecf36c23e6c68237479f89f962f82dae83dc15feeceb37e4"},
+ {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:18d7585c463087bddcfa74c2ba267339f14f2515158ac4db30b1f9cbdb62c8ef"},
+ {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d4c7d1a051eeb39f5c9547e82ea27cbcc28338482242e3e0b7768033cb083821"},
+ {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e4df1e3b3bec320790f699890d41c59d250f6beda159ea3c44c3f5bac1976940"},
+ {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2cf126d33a91ee6eedc7f3197b53e87a2acdac63602c0f03a02dd69e4b138174"},
+ {file = "rpds_py-0.20.0-cp312-none-win32.whl", hash = "sha256:8bc7690f7caee50b04a79bf017a8d020c1f48c2a1077ffe172abec59870f1139"},
+ {file = "rpds_py-0.20.0-cp312-none-win_amd64.whl", hash = "sha256:0e13e6952ef264c40587d510ad676a988df19adea20444c2b295e536457bc585"},
+ {file = "rpds_py-0.20.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:aa9a0521aeca7d4941499a73ad7d4f8ffa3d1affc50b9ea11d992cd7eff18a29"},
+ {file = "rpds_py-0.20.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4a1f1d51eccb7e6c32ae89243cb352389228ea62f89cd80823ea7dd1b98e0b91"},
+ {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a86a9b96070674fc88b6f9f71a97d2c1d3e5165574615d1f9168ecba4cecb24"},
+ {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6c8ef2ebf76df43f5750b46851ed1cdf8f109d7787ca40035fe19fbdc1acc5a7"},
+ {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b74b25f024b421d5859d156750ea9a65651793d51b76a2e9238c05c9d5f203a9"},
+ {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57eb94a8c16ab08fef6404301c38318e2c5a32216bf5de453e2714c964c125c8"},
+ {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1940dae14e715e2e02dfd5b0f64a52e8374a517a1e531ad9412319dc3ac7879"},
+ {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d20277fd62e1b992a50c43f13fbe13277a31f8c9f70d59759c88f644d66c619f"},
+ {file = "rpds_py-0.20.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:06db23d43f26478303e954c34c75182356ca9aa7797d22c5345b16871ab9c45c"},
+ {file = "rpds_py-0.20.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b2a5db5397d82fa847e4c624b0c98fe59d2d9b7cf0ce6de09e4d2e80f8f5b3f2"},
+ {file = "rpds_py-0.20.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5a35df9f5548fd79cb2f52d27182108c3e6641a4feb0f39067911bf2adaa3e57"},
+ {file = "rpds_py-0.20.0-cp313-none-win32.whl", hash = "sha256:fd2d84f40633bc475ef2d5490b9c19543fbf18596dcb1b291e3a12ea5d722f7a"},
+ {file = "rpds_py-0.20.0-cp313-none-win_amd64.whl", hash = "sha256:9bc2d153989e3216b0559251b0c260cfd168ec78b1fac33dd485750a228db5a2"},
+ {file = "rpds_py-0.20.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:617c7357272c67696fd052811e352ac54ed1d9b49ab370261a80d3b6ce385045"},
+ {file = "rpds_py-0.20.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9426133526f69fcaba6e42146b4e12d6bc6c839b8b555097020e2b78ce908dcc"},
+ {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:deb62214c42a261cb3eb04d474f7155279c1a8a8c30ac89b7dcb1721d92c3c02"},
+ {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fcaeb7b57f1a1e071ebd748984359fef83ecb026325b9d4ca847c95bc7311c92"},
+ {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d454b8749b4bd70dd0a79f428731ee263fa6995f83ccb8bada706e8d1d3ff89d"},
+ {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d807dc2051abe041b6649681dce568f8e10668e3c1c6543ebae58f2d7e617855"},
+ {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3c20f0ddeb6e29126d45f89206b8291352b8c5b44384e78a6499d68b52ae511"},
+ {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b7f19250ceef892adf27f0399b9e5afad019288e9be756d6919cb58892129f51"},
+ {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:4f1ed4749a08379555cebf4650453f14452eaa9c43d0a95c49db50c18b7da075"},
+ {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:dcedf0b42bcb4cfff4101d7771a10532415a6106062f005ab97d1d0ab5681c60"},
+ {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:39ed0d010457a78f54090fafb5d108501b5aa5604cc22408fc1c0c77eac14344"},
+ {file = "rpds_py-0.20.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:bb273176be34a746bdac0b0d7e4e2c467323d13640b736c4c477881a3220a989"},
+ {file = "rpds_py-0.20.0.tar.gz", hash = "sha256:d72a210824facfdaf8768cf2d7ca25a042c30320b3020de2fa04640920d4e121"},
+]
+
+[[package]]
+name = "rsa"
+version = "4.9"
+requires_python = ">=3.6,<4"
+summary = "Pure-Python RSA implementation"
+groups = ["default"]
+marker = "python_version >= \"3.6\""
+dependencies = [
+ "pyasn1>=0.1.3",
+]
+files = [
+ {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"},
+ {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"},
+]
+
+[[package]]
+name = "ruff"
+version = "0.5.7"
+requires_python = ">=3.7"
+summary = "An extremely fast Python linter and code formatter, written in Rust."
+groups = ["dev"]
+files = [
+ {file = "ruff-0.5.7-py3-none-linux_armv6l.whl", hash = "sha256:548992d342fc404ee2e15a242cdbea4f8e39a52f2e7752d0e4cbe88d2d2f416a"},
+ {file = "ruff-0.5.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:00cc8872331055ee017c4f1071a8a31ca0809ccc0657da1d154a1d2abac5c0be"},
+ {file = "ruff-0.5.7-py3-none-macosx_11_0_arm64.whl", hash = "sha256:eaf3d86a1fdac1aec8a3417a63587d93f906c678bb9ed0b796da7b59c1114a1e"},
+ {file = "ruff-0.5.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a01c34400097b06cf8a6e61b35d6d456d5bd1ae6961542de18ec81eaf33b4cb8"},
+ {file = "ruff-0.5.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fcc8054f1a717e2213500edaddcf1dbb0abad40d98e1bd9d0ad364f75c763eea"},
+ {file = "ruff-0.5.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f70284e73f36558ef51602254451e50dd6cc479f8b6f8413a95fcb5db4a55fc"},
+ {file = "ruff-0.5.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:a78ad870ae3c460394fc95437d43deb5c04b5c29297815a2a1de028903f19692"},
+ {file = "ruff-0.5.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ccd078c66a8e419475174bfe60a69adb36ce04f8d4e91b006f1329d5cd44bcf"},
+ {file = "ruff-0.5.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e31c9bad4ebf8fdb77b59cae75814440731060a09a0e0077d559a556453acbb"},
+ {file = "ruff-0.5.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d796327eed8e168164346b769dd9a27a70e0298d667b4ecee6877ce8095ec8e"},
+ {file = "ruff-0.5.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:4a09ea2c3f7778cc635e7f6edf57d566a8ee8f485f3c4454db7771efb692c499"},
+ {file = "ruff-0.5.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:a36d8dcf55b3a3bc353270d544fb170d75d2dff41eba5df57b4e0b67a95bb64e"},
+ {file = "ruff-0.5.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:9369c218f789eefbd1b8d82a8cf25017b523ac47d96b2f531eba73770971c9e5"},
+ {file = "ruff-0.5.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:b88ca3db7eb377eb24fb7c82840546fb7acef75af4a74bd36e9ceb37a890257e"},
+ {file = "ruff-0.5.7-py3-none-win32.whl", hash = "sha256:33d61fc0e902198a3e55719f4be6b375b28f860b09c281e4bdbf783c0566576a"},
+ {file = "ruff-0.5.7-py3-none-win_amd64.whl", hash = "sha256:083bbcbe6fadb93cd86709037acc510f86eed5a314203079df174c40bbbca6b3"},
+ {file = "ruff-0.5.7-py3-none-win_arm64.whl", hash = "sha256:2dca26154ff9571995107221d0aeaad0e75a77b5a682d6236cf89a58c70b76f4"},
+ {file = "ruff-0.5.7.tar.gz", hash = "sha256:8dfc0a458797f5d9fb622dd0efc52d796f23f0a1493a9527f4e49a550ae9a7e5"},
+]
+
+[[package]]
+name = "scikit-learn"
+version = "1.5.1"
+requires_python = ">=3.9"
+summary = "A set of python modules for machine learning and data mining"
+groups = ["default"]
+dependencies = [
+ "joblib>=1.2.0",
+ "numpy>=1.19.5",
+ "scipy>=1.6.0",
+ "threadpoolctl>=3.1.0",
+]
+files = [
+ {file = "scikit_learn-1.5.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:781586c414f8cc58e71da4f3d7af311e0505a683e112f2f62919e3019abd3745"},
+ {file = "scikit_learn-1.5.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:f5b213bc29cc30a89a3130393b0e39c847a15d769d6e59539cd86b75d276b1a7"},
+ {file = "scikit_learn-1.5.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ff4ba34c2abff5ec59c803ed1d97d61b036f659a17f55be102679e88f926fac"},
+ {file = "scikit_learn-1.5.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:161808750c267b77b4a9603cf9c93579c7a74ba8486b1336034c2f1579546d21"},
+ {file = "scikit_learn-1.5.1-cp310-cp310-win_amd64.whl", hash = "sha256:10e49170691514a94bb2e03787aa921b82dbc507a4ea1f20fd95557862c98dc1"},
+ {file = "scikit_learn-1.5.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:154297ee43c0b83af12464adeab378dee2d0a700ccd03979e2b821e7dd7cc1c2"},
+ {file = "scikit_learn-1.5.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:b5e865e9bd59396220de49cb4a57b17016256637c61b4c5cc81aaf16bc123bbe"},
+ {file = "scikit_learn-1.5.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:909144d50f367a513cee6090873ae582dba019cb3fca063b38054fa42704c3a4"},
+ {file = "scikit_learn-1.5.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:689b6f74b2c880276e365fe84fe4f1befd6a774f016339c65655eaff12e10cbf"},
+ {file = "scikit_learn-1.5.1-cp311-cp311-win_amd64.whl", hash = "sha256:9a07f90846313a7639af6a019d849ff72baadfa4c74c778821ae0fad07b7275b"},
+ {file = "scikit_learn-1.5.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5944ce1faada31c55fb2ba20a5346b88e36811aab504ccafb9f0339e9f780395"},
+ {file = "scikit_learn-1.5.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:0828673c5b520e879f2af6a9e99eee0eefea69a2188be1ca68a6121b809055c1"},
+ {file = "scikit_learn-1.5.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:508907e5f81390e16d754e8815f7497e52139162fd69c4fdbd2dfa5d6cc88915"},
+ {file = "scikit_learn-1.5.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97625f217c5c0c5d0505fa2af28ae424bd37949bb2f16ace3ff5f2f81fb4498b"},
+ {file = "scikit_learn-1.5.1-cp312-cp312-win_amd64.whl", hash = "sha256:da3f404e9e284d2b0a157e1b56b6566a34eb2798205cba35a211df3296ab7a74"},
+ {file = "scikit_learn-1.5.1.tar.gz", hash = "sha256:0ea5d40c0e3951df445721927448755d3fe1d80833b0b7308ebff5d2a45e6414"},
+]
+
+[[package]]
+name = "scipy"
+version = "1.14.0"
+requires_python = ">=3.10"
+summary = "Fundamental algorithms for scientific computing in Python"
+groups = ["default"]
+dependencies = [
+ "numpy<2.3,>=1.23.5",
+]
+files = [
+ {file = "scipy-1.14.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7e911933d54ead4d557c02402710c2396529540b81dd554fc1ba270eb7308484"},
+ {file = "scipy-1.14.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:687af0a35462402dd851726295c1a5ae5f987bd6e9026f52e9505994e2f84ef6"},
+ {file = "scipy-1.14.0-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:07e179dc0205a50721022344fb85074f772eadbda1e1b3eecdc483f8033709b7"},
+ {file = "scipy-1.14.0-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:6a9c9a9b226d9a21e0a208bdb024c3982932e43811b62d202aaf1bb59af264b1"},
+ {file = "scipy-1.14.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:076c27284c768b84a45dcf2e914d4000aac537da74236a0d45d82c6fa4b7b3c0"},
+ {file = "scipy-1.14.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42470ea0195336df319741e230626b6225a740fd9dce9642ca13e98f667047c0"},
+ {file = "scipy-1.14.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:176c6f0d0470a32f1b2efaf40c3d37a24876cebf447498a4cefb947a79c21e9d"},
+ {file = "scipy-1.14.0-cp310-cp310-win_amd64.whl", hash = "sha256:ad36af9626d27a4326c8e884917b7ec321d8a1841cd6dacc67d2a9e90c2f0359"},
+ {file = "scipy-1.14.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6d056a8709ccda6cf36cdd2eac597d13bc03dba38360f418560a93050c76a16e"},
+ {file = "scipy-1.14.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:f0a50da861a7ec4573b7c716b2ebdcdf142b66b756a0d392c236ae568b3a93fb"},
+ {file = "scipy-1.14.0-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:94c164a9e2498e68308e6e148646e486d979f7fcdb8b4cf34b5441894bdb9caf"},
+ {file = "scipy-1.14.0-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:a7d46c3e0aea5c064e734c3eac5cf9eb1f8c4ceee756262f2c7327c4c2691c86"},
+ {file = "scipy-1.14.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9eee2989868e274aae26125345584254d97c56194c072ed96cb433f32f692ed8"},
+ {file = "scipy-1.14.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e3154691b9f7ed73778d746da2df67a19d046a6c8087c8b385bc4cdb2cfca74"},
+ {file = "scipy-1.14.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c40003d880f39c11c1edbae8144e3813904b10514cd3d3d00c277ae996488cdb"},
+ {file = "scipy-1.14.0-cp311-cp311-win_amd64.whl", hash = "sha256:5b083c8940028bb7e0b4172acafda6df762da1927b9091f9611b0bcd8676f2bc"},
+ {file = "scipy-1.14.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:bff2438ea1330e06e53c424893ec0072640dac00f29c6a43a575cbae4c99b2b9"},
+ {file = "scipy-1.14.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:bbc0471b5f22c11c389075d091d3885693fd3f5e9a54ce051b46308bc787e5d4"},
+ {file = "scipy-1.14.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:64b2ff514a98cf2bb734a9f90d32dc89dc6ad4a4a36a312cd0d6327170339eb0"},
+ {file = "scipy-1.14.0-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:7d3da42fbbbb860211a811782504f38ae7aaec9de8764a9bef6b262de7a2b50f"},
+ {file = "scipy-1.14.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d91db2c41dd6c20646af280355d41dfa1ec7eead235642178bd57635a3f82209"},
+ {file = "scipy-1.14.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a01cc03bcdc777c9da3cfdcc74b5a75caffb48a6c39c8450a9a05f82c4250a14"},
+ {file = "scipy-1.14.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:65df4da3c12a2bb9ad52b86b4dcf46813e869afb006e58be0f516bc370165159"},
+ {file = "scipy-1.14.0-cp312-cp312-win_amd64.whl", hash = "sha256:4c4161597c75043f7154238ef419c29a64ac4a7c889d588ea77690ac4d0d9b20"},
+ {file = "scipy-1.14.0.tar.gz", hash = "sha256:b5923f48cb840380f9854339176ef21763118a7300a88203ccd0bdd26e58527b"},
+]
+
+[[package]]
+name = "scs"
+version = "3.2.6"
+requires_python = ">=3.7"
+summary = "Splitting conic solver"
+groups = ["default"]
+dependencies = [
+ "numpy",
+ "scipy",
+]
+files = [
+ {file = "scs-3.2.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8df520880f456e94cb10a4a380d69ccf74d20f8e1576e3e70b4508d8bb897f62"},
+ {file = "scs-3.2.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ecff69a2e300eed03159059da5bf431f3077c7a147c56ec6e52605b35f0dba57"},
+ {file = "scs-3.2.6-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:4716b670c1f29e75bd4ce993d5ed2b76a71d3bc04fd82696e4e6cdd0c8529580"},
+ {file = "scs-3.2.6-cp310-cp310-win_amd64.whl", hash = "sha256:23cf3f783a9ad88b42a0dd6ca37b46d2a0a7776b49c851adc810bb0b8669865c"},
+ {file = "scs-3.2.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b2cb93492305dc17961602cc3d2d81d8918ce7f2cdd15e4d5958566a4bdfe5e1"},
+ {file = "scs-3.2.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8b34bc900cca3b56c7e4ff988eeee84b3dc15667d68f0f8fbbe4fbb2433c29c"},
+ {file = "scs-3.2.6-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:bc4f91e0ce27f662d4fa12704212e7142e1cd1bd3db0344730f763e27e9824d8"},
+ {file = "scs-3.2.6-cp311-cp311-win_amd64.whl", hash = "sha256:f971008b76272f085c9ffe9416ed4abe9f52cf111c6798491b3a5d4fcaf10476"},
+ {file = "scs-3.2.6-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7a40f42834109e079e2cd03bcb0a99540a04cd837bf8d7edb5407ad78e70fc70"},
+ {file = "scs-3.2.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:65d36db28955569cd56b0ef90aa551e5d9b1c42b9988e38844084b430b795d12"},
+ {file = "scs-3.2.6-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:b65f6d5d8b1e3eb8bbbb0e9b791cd6e7ad8205d2c4023d8368762c19fe854f19"},
+ {file = "scs-3.2.6-cp312-cp312-win_amd64.whl", hash = "sha256:55e3eb71880baf05fd0cd6671fbfb563210c2579e15cbc18491b5c90e659d258"},
+ {file = "scs-3.2.6.tar.gz", hash = "sha256:caf6ef48b86e8d4712a3d7b586ffb7a2b413c2a9664ac4da2c8de81dec6a1020"},
+]
+
+[[package]]
+name = "setuptools"
+version = "72.2.0"
+requires_python = ">=3.8"
+summary = "Easily download, build, install, upgrade, and uninstall Python packages"
+groups = ["default"]
+files = [
+ {file = "setuptools-72.2.0-py3-none-any.whl", hash = "sha256:f11dd94b7bae3a156a95ec151f24e4637fb4fa19c878e4d191bfb8b2d82728c4"},
+ {file = "setuptools-72.2.0.tar.gz", hash = "sha256:80aacbf633704e9c8bfa1d99fa5dd4dc59573efcf9e4042c13d3bcef91ac2ef9"},
+]
+
+[[package]]
+name = "shellingham"
+version = "1.5.4"
+requires_python = ">=3.7"
+summary = "Tool to Detect Surrounding Shell"
+groups = ["default"]
+files = [
+ {file = "shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686"},
+ {file = "shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de"},
+]
+
+[[package]]
+name = "six"
+version = "1.16.0"
+requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
+summary = "Python 2 and 3 compatibility utilities"
+groups = ["default"]
+files = [
+ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
+ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
+]
+
+[[package]]
+name = "smart-open"
+version = "7.0.4"
+requires_python = "<4.0,>=3.7"
+summary = "Utils for streaming large files (S3, HDFS, GCS, Azure Blob Storage, gzip, bz2...)"
+groups = ["default"]
+dependencies = [
+ "wrapt",
+]
+files = [
+ {file = "smart_open-7.0.4-py3-none-any.whl", hash = "sha256:4e98489932b3372595cddc075e6033194775165702887216b65eba760dfd8d47"},
+ {file = "smart_open-7.0.4.tar.gz", hash = "sha256:62b65852bdd1d1d516839fcb1f6bc50cd0f16e05b4ec44b52f43d38bcb838524"},
+]
+
+[[package]]
+name = "sniffio"
+version = "1.3.1"
+requires_python = ">=3.7"
+summary = "Sniff out which async library your code is running under"
+groups = ["default", "docs"]
+files = [
+ {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"},
+ {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"},
+]
+
+[[package]]
+name = "snowballstemmer"
+version = "2.2.0"
+summary = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms."
+groups = ["docs"]
+files = [
+ {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"},
+ {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"},
+]
+
+[[package]]
+name = "soupsieve"
+version = "2.6"
+requires_python = ">=3.8"
+summary = "A modern CSS selector implementation for Beautiful Soup."
+groups = ["docs"]
+files = [
+ {file = "soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9"},
+ {file = "soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb"},
+]
+
+[[package]]
+name = "sphinx"
+version = "8.0.2"
+requires_python = ">=3.10"
+summary = "Python documentation generator"
+groups = ["docs"]
+dependencies = [
+ "Jinja2>=3.1",
+ "Pygments>=2.17",
+ "alabaster>=0.7.14",
+ "babel>=2.13",
+ "colorama>=0.4.6; sys_platform == \"win32\"",
+ "docutils<0.22,>=0.20",
+ "imagesize>=1.3",
+ "packaging>=23.0",
+ "requests>=2.30.0",
+ "snowballstemmer>=2.2",
+ "sphinxcontrib-applehelp",
+ "sphinxcontrib-devhelp",
+ "sphinxcontrib-htmlhelp>=2.0.0",
+ "sphinxcontrib-jsmath",
+ "sphinxcontrib-qthelp",
+ "sphinxcontrib-serializinghtml>=1.1.9",
+ "tomli>=2; python_version < \"3.11\"",
+]
+files = [
+ {file = "sphinx-8.0.2-py3-none-any.whl", hash = "sha256:56173572ae6c1b9a38911786e206a110c9749116745873feae4f9ce88e59391d"},
+ {file = "sphinx-8.0.2.tar.gz", hash = "sha256:0cce1ddcc4fd3532cf1dd283bc7d886758362c5c1de6598696579ce96d8ffa5b"},
+]
+
+[[package]]
+name = "sphinx-autobuild"
+version = "2024.4.16"
+requires_python = ">=3.9"
+summary = "Rebuild Sphinx documentation on changes, with hot reloading in the browser."
+groups = ["docs"]
+dependencies = [
+ "colorama",
+ "sphinx",
+ "starlette>=0.35",
+ "uvicorn>=0.25",
+ "watchfiles>=0.20",
+ "websockets>=11",
+]
+files = [
+ {file = "sphinx_autobuild-2024.4.16-py3-none-any.whl", hash = "sha256:f2522779d30fcbf0253e09714f274ce8c608cb6ebcd67922b1c54de59faba702"},
+ {file = "sphinx_autobuild-2024.4.16.tar.gz", hash = "sha256:1c0ed37a1970eed197f9c5a66d65759e7c4e4cba7b5a5d77940752bf1a59f2c7"},
+]
+
+[[package]]
+name = "sphinx-click"
+version = "6.0.0"
+requires_python = ">=3.8"
+summary = "Sphinx extension that automatically documents click applications"
+groups = ["docs"]
+dependencies = [
+ "click>=8.0",
+ "docutils",
+ "sphinx>=4.0",
+]
+files = [
+ {file = "sphinx_click-6.0.0-py3-none-any.whl", hash = "sha256:1e0a3c83bcb7c55497751b19d07ebe56b5d7b85eb76dd399cf9061b497adc317"},
+ {file = "sphinx_click-6.0.0.tar.gz", hash = "sha256:f5d664321dc0c6622ff019f1e1c84e58ce0cecfddeb510e004cf60c2a3ab465b"},
+]
+
+[[package]]
+name = "sphinx-copybutton"
+version = "0.5.2"
+requires_python = ">=3.7"
+summary = "Add a copy button to each of your code cells."
+groups = ["docs"]
+dependencies = [
+ "sphinx>=1.8",
+]
+files = [
+ {file = "sphinx-copybutton-0.5.2.tar.gz", hash = "sha256:4cf17c82fb9646d1bc9ca92ac280813a3b605d8c421225fd9913154103ee1fbd"},
+ {file = "sphinx_copybutton-0.5.2-py3-none-any.whl", hash = "sha256:fb543fd386d917746c9a2c50360c7905b605726b9355cd26e9974857afeae06e"},
+]
+
+[[package]]
+name = "sphinx-design"
+version = "0.6.1"
+requires_python = ">=3.9"
+summary = "A sphinx extension for designing beautiful, view size responsive web components."
+groups = ["docs"]
+dependencies = [
+ "sphinx<9,>=6",
+]
+files = [
+ {file = "sphinx_design-0.6.1-py3-none-any.whl", hash = "sha256:b11f37db1a802a183d61b159d9a202314d4d2fe29c163437001324fe2f19549c"},
+ {file = "sphinx_design-0.6.1.tar.gz", hash = "sha256:b44eea3719386d04d765c1a8257caca2b3e6f8421d7b3a5e742c0fd45f84e632"},
+]
+
+[[package]]
+name = "sphinxcontrib-applehelp"
+version = "2.0.0"
+requires_python = ">=3.9"
+summary = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books"
+groups = ["docs"]
+files = [
+ {file = "sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5"},
+ {file = "sphinxcontrib_applehelp-2.0.0.tar.gz", hash = "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1"},
+]
+
+[[package]]
+name = "sphinxcontrib-devhelp"
+version = "2.0.0"
+requires_python = ">=3.9"
+summary = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp documents"
+groups = ["docs"]
+files = [
+ {file = "sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2"},
+ {file = "sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad"},
+]
+
+[[package]]
+name = "sphinxcontrib-htmlhelp"
+version = "2.1.0"
+requires_python = ">=3.9"
+summary = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files"
+groups = ["docs"]
+files = [
+ {file = "sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8"},
+ {file = "sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9"},
+]
+
+[[package]]
+name = "sphinxcontrib-jsmath"
+version = "1.0.1"
+requires_python = ">=3.5"
+summary = "A sphinx extension which renders display math in HTML via JavaScript"
+groups = ["docs"]
+files = [
+ {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"},
+ {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"},
+]
+
+[[package]]
+name = "sphinxcontrib-qthelp"
+version = "2.0.0"
+requires_python = ">=3.9"
+summary = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp documents"
+groups = ["docs"]
+files = [
+ {file = "sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb"},
+ {file = "sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab"},
+]
+
+[[package]]
+name = "sphinxcontrib-serializinghtml"
+version = "2.0.0"
+requires_python = ">=3.9"
+summary = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)"
+groups = ["docs"]
+files = [
+ {file = "sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331"},
+ {file = "sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d"},
+]
+
+[[package]]
+name = "starlette"
+version = "0.38.2"
+requires_python = ">=3.8"
+summary = "The little ASGI library that shines."
+groups = ["docs"]
+dependencies = [
+ "anyio<5,>=3.4.0",
+ "typing-extensions>=3.10.0; python_version < \"3.10\"",
+]
+files = [
+ {file = "starlette-0.38.2-py3-none-any.whl", hash = "sha256:4ec6a59df6bbafdab5f567754481657f7ed90dc9d69b0c9ff017907dd54faeff"},
+ {file = "starlette-0.38.2.tar.gz", hash = "sha256:c7c0441065252160993a1a37cf2a73bb64d271b17303e0b0c1eb7191cfb12d75"},
+]
+
+[[package]]
+name = "sympy"
+version = "1.13.2"
+requires_python = ">=3.8"
+summary = "Computer algebra system (CAS) in Python"
+groups = ["default"]
+dependencies = [
+ "mpmath<1.4,>=1.1.0",
+]
+files = [
+ {file = "sympy-1.13.2-py3-none-any.whl", hash = "sha256:c51d75517712f1aed280d4ce58506a4a88d635d6b5dd48b39102a7ae1f3fcfe9"},
+ {file = "sympy-1.13.2.tar.gz", hash = "sha256:401449d84d07be9d0c7a46a64bd54fe097667d5e7181bfe67ec777be9e01cb13"},
+]
+
+[[package]]
+name = "tenacity"
+version = "9.0.0"
+requires_python = ">=3.8"
+summary = "Retry code until it succeeds"
+groups = ["default"]
+files = [
+ {file = "tenacity-9.0.0-py3-none-any.whl", hash = "sha256:93de0c98785b27fcf659856aa9f54bfbd399e29969b0621bc7f762bd441b4539"},
+ {file = "tenacity-9.0.0.tar.gz", hash = "sha256:807f37ca97d62aa361264d497b0e31e92b8027044942bfa756160d908320d73b"},
+]
+
+[[package]]
+name = "textual"
+version = "0.76.0"
+requires_python = "<4.0.0,>=3.8.1"
+summary = "Modern Text User Interface framework"
+groups = ["default"]
+marker = "sys_platform != \"win32\""
+dependencies = [
+ "markdown-it-py[linkify,plugins]>=2.1.0",
+ "rich>=13.3.3",
+ "typing-extensions<5.0.0,>=4.4.0",
+]
+files = [
+ {file = "textual-0.76.0-py3-none-any.whl", hash = "sha256:e2035609c889dba507d34a5d7b333f1c8c53a29fb170962cb92101507663517a"},
+ {file = "textual-0.76.0.tar.gz", hash = "sha256:b12e8879d591090c0901b5cb8121d086e28e677353b368292d3865ec99b83b70"},
+]
+
+[[package]]
+name = "threadpoolctl"
+version = "3.5.0"
+requires_python = ">=3.8"
+summary = "threadpoolctl"
+groups = ["default"]
+files = [
+ {file = "threadpoolctl-3.5.0-py3-none-any.whl", hash = "sha256:56c1e26c150397e58c4926da8eeee87533b1e32bef131bd4bf6a2f45f3185467"},
+ {file = "threadpoolctl-3.5.0.tar.gz", hash = "sha256:082433502dd922bf738de0d8bcc4fdcbf0979ff44c42bd40f5af8a282f6fa107"},
+]
+
+[[package]]
+name = "tomli"
+version = "2.0.1"
+requires_python = ">=3.7"
+summary = "A lil' TOML parser"
+groups = ["dev", "docs"]
+marker = "python_version < \"3.11\""
+files = [
+ {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"},
+ {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
+]
+
+[[package]]
+name = "torch"
+version = "2.4.0"
+requires_python = ">=3.8.0"
+summary = "Tensors and Dynamic neural networks in Python with strong GPU acceleration"
+groups = ["default"]
+dependencies = [
+ "filelock",
+ "fsspec",
+ "jinja2",
+ "networkx",
+ "nvidia-cublas-cu12==12.1.3.1; platform_system == \"Linux\" and platform_machine == \"x86_64\"",
+ "nvidia-cuda-cupti-cu12==12.1.105; platform_system == \"Linux\" and platform_machine == \"x86_64\"",
+ "nvidia-cuda-nvrtc-cu12==12.1.105; platform_system == \"Linux\" and platform_machine == \"x86_64\"",
+ "nvidia-cuda-runtime-cu12==12.1.105; platform_system == \"Linux\" and platform_machine == \"x86_64\"",
+ "nvidia-cudnn-cu12==9.1.0.70; platform_system == \"Linux\" and platform_machine == \"x86_64\"",
+ "nvidia-cufft-cu12==11.0.2.54; platform_system == \"Linux\" and platform_machine == \"x86_64\"",
+ "nvidia-curand-cu12==10.3.2.106; platform_system == \"Linux\" and platform_machine == \"x86_64\"",
+ "nvidia-cusolver-cu12==11.4.5.107; platform_system == \"Linux\" and platform_machine == \"x86_64\"",
+ "nvidia-cusparse-cu12==12.1.0.106; platform_system == \"Linux\" and platform_machine == \"x86_64\"",
+ "nvidia-nccl-cu12==2.20.5; platform_system == \"Linux\" and platform_machine == \"x86_64\"",
+ "nvidia-nvtx-cu12==12.1.105; platform_system == \"Linux\" and platform_machine == \"x86_64\"",
+ "setuptools",
+ "sympy",
+ "triton==3.0.0; platform_system == \"Linux\" and platform_machine == \"x86_64\" and python_version < \"3.13\"",
+ "typing-extensions>=4.8.0",
+]
+files = [
+ {file = "torch-2.4.0-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:4ed94583e244af51d6a8d28701ca5a9e02d1219e782f5a01dd401f90af17d8ac"},
+ {file = "torch-2.4.0-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:c4ca297b7bd58b506bfd6e78ffd14eb97c0e7797dcd7965df62f50bb575d8954"},
+ {file = "torch-2.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:2497cbc7b3c951d69b276ca51fe01c2865db67040ac67f5fc20b03e41d16ea4a"},
+ {file = "torch-2.4.0-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:685418ab93730efbee71528821ff54005596970dd497bf03c89204fb7e3f71de"},
+ {file = "torch-2.4.0-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:e743adadd8c8152bb8373543964551a7cb7cc20ba898dc8f9c0cdbe47c283de0"},
+ {file = "torch-2.4.0-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:7334325c0292cbd5c2eac085f449bf57d3690932eac37027e193ba775703c9e6"},
+ {file = "torch-2.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:97730014da4c57ffacb3c09298c6ce05400606e890bd7a05008d13dd086e46b1"},
+ {file = "torch-2.4.0-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:f169b4ea6dc93b3a33319611fcc47dc1406e4dd539844dcbd2dec4c1b96e166d"},
+ {file = "torch-2.4.0-cp312-cp312-manylinux1_x86_64.whl", hash = "sha256:997084a0f9784d2a89095a6dc67c7925e21bf25dea0b3d069b41195016ccfcbb"},
+ {file = "torch-2.4.0-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:bc3988e8b36d1e8b998d143255d9408d8c75da4ab6dd0dcfd23b623dfb0f0f57"},
+ {file = "torch-2.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:3374128bbf7e62cdaed6c237bfd39809fbcfaa576bee91e904706840c3f2195c"},
+ {file = "torch-2.4.0-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:91aaf00bfe1ffa44dc5b52809d9a95129fca10212eca3ac26420eb11727c6288"},
+]
+
+[[package]]
+name = "tqdm"
+version = "4.66.5"
+requires_python = ">=3.7"
+summary = "Fast, Extensible Progress Meter"
+groups = ["default"]
+dependencies = [
+ "colorama; platform_system == \"Windows\"",
+]
+files = [
+ {file = "tqdm-4.66.5-py3-none-any.whl", hash = "sha256:90279a3770753eafc9194a0364852159802111925aa30eb3f9d85b0e805ac7cd"},
+ {file = "tqdm-4.66.5.tar.gz", hash = "sha256:e1020aef2e5096702d8a025ac7d16b1577279c9d63f8375b63083e9a5f0fcbad"},
+]
+
+[[package]]
+name = "triton"
+version = "3.0.0"
+summary = "A language and compiler for custom Deep Learning operations"
+groups = ["default"]
+marker = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and python_version < \"3.13\""
+dependencies = [
+ "filelock",
+]
+files = [
+ {file = "triton-3.0.0-1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e1efef76935b2febc365bfadf74bcb65a6f959a9872e5bddf44cc9e0adce1e1a"},
+ {file = "triton-3.0.0-1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5ce8520437c602fb633f1324cc3871c47bee3b67acf9756c1a66309b60e3216c"},
+ {file = "triton-3.0.0-1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:34e509deb77f1c067d8640725ef00c5cbfcb2052a1a3cb6a6d343841f92624eb"},
+ {file = "triton-3.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39b052da883351fdf6be3d93cedae6db3b8e3988d3b09ed221bccecfa9612230"},
+ {file = "triton-3.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd34f19a8582af96e6291d4afce25dac08cb2a5d218c599163761e8e0827208e"},
+ {file = "triton-3.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d5e10de8c011adeb7c878c6ce0dd6073b14367749e34467f1cff2bde1b78253"},
+]
+
+[[package]]
+name = "typeguard"
+version = "2.13.3"
+requires_python = ">=3.5.3"
+summary = "Run-time type checker for Python"
+groups = ["default"]
+files = [
+ {file = "typeguard-2.13.3-py3-none-any.whl", hash = "sha256:5e3e3be01e887e7eafae5af63d1f36c849aaa94e3a0112097312aabfa16284f1"},
+ {file = "typeguard-2.13.3.tar.gz", hash = "sha256:00edaa8da3a133674796cf5ea87d9f4b4c367d77476e185e80251cc13dfbb8c4"},
+]
+
+[[package]]
+name = "typer"
+version = "0.12.5"
+requires_python = ">=3.7"
+summary = "Typer, build great CLIs. Easy to code. Based on Python type hints."
+groups = ["default"]
+dependencies = [
+ "click>=8.0.0",
+ "rich>=10.11.0",
+ "shellingham>=1.3.0",
+ "typing-extensions>=3.7.4.3",
+]
+files = [
+ {file = "typer-0.12.5-py3-none-any.whl", hash = "sha256:62fe4e471711b147e3365034133904df3e235698399bc4de2b36c8579298d52b"},
+ {file = "typer-0.12.5.tar.gz", hash = "sha256:f592f089bedcc8ec1b974125d64851029c3b1af145f04aca64d69410f0c9b722"},
+]
+
+[[package]]
+name = "typing-extensions"
+version = "4.12.2"
+requires_python = ">=3.8"
+summary = "Backported and Experimental Type Hints for Python 3.8+"
+groups = ["default", "dev", "docs"]
+files = [
+ {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"},
+ {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"},
+]
+
+[[package]]
+name = "tzdata"
+version = "2024.1"
+requires_python = ">=2"
+summary = "Provider of IANA time zone data"
+groups = ["default"]
+files = [
+ {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"},
+ {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"},
+]
+
+[[package]]
+name = "uc-micro-py"
+version = "1.0.3"
+requires_python = ">=3.7"
+summary = "Micro subset of unicode data files for linkify-it-py projects."
+groups = ["default"]
+marker = "sys_platform != \"win32\""
+files = [
+ {file = "uc-micro-py-1.0.3.tar.gz", hash = "sha256:d321b92cff673ec58027c04015fcaa8bb1e005478643ff4a500882eaab88c48a"},
+ {file = "uc_micro_py-1.0.3-py3-none-any.whl", hash = "sha256:db1dffff340817673d7b466ec86114a9dc0e9d4d9b5ba229d9d60e5c12600cd5"},
+]
+
+[[package]]
+name = "urllib3"
+version = "2.2.2"
+requires_python = ">=3.8"
+summary = "HTTP library with thread-safe connection pooling, file post, and more."
+groups = ["default", "docs"]
+files = [
+ {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"},
+ {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"},
+]
+
+[[package]]
+name = "uvicorn"
+version = "0.30.6"
+requires_python = ">=3.8"
+summary = "The lightning-fast ASGI server."
+groups = ["default", "docs"]
+dependencies = [
+ "click>=7.0",
+ "h11>=0.8",
+ "typing-extensions>=4.0; python_version < \"3.11\"",
+]
+files = [
+ {file = "uvicorn-0.30.6-py3-none-any.whl", hash = "sha256:65fd46fe3fda5bdc1b03b94eb634923ff18cd35b2f084813ea79d1f103f711b5"},
+ {file = "uvicorn-0.30.6.tar.gz", hash = "sha256:4b15decdda1e72be08209e860a1e10e92439ad5b97cf44cc945fcbee66fc5788"},
+]
+
+[[package]]
+name = "uvicorn"
+version = "0.30.6"
+extras = ["standard"]
+requires_python = ">=3.8"
+summary = "The lightning-fast ASGI server."
+groups = ["default"]
+dependencies = [
+ "colorama>=0.4; sys_platform == \"win32\"",
+ "httptools>=0.5.0",
+ "python-dotenv>=0.13",
+ "pyyaml>=5.1",
+ "uvicorn==0.30.6",
+ "uvloop!=0.15.0,!=0.15.1,>=0.14.0; (sys_platform != \"cygwin\" and sys_platform != \"win32\") and platform_python_implementation != \"PyPy\"",
+ "watchfiles>=0.13",
+ "websockets>=10.4",
+]
+files = [
+ {file = "uvicorn-0.30.6-py3-none-any.whl", hash = "sha256:65fd46fe3fda5bdc1b03b94eb634923ff18cd35b2f084813ea79d1f103f711b5"},
+ {file = "uvicorn-0.30.6.tar.gz", hash = "sha256:4b15decdda1e72be08209e860a1e10e92439ad5b97cf44cc945fcbee66fc5788"},
+]
+
+[[package]]
+name = "uvloop"
+version = "0.19.0"
+requires_python = ">=3.8.0"
+summary = "Fast implementation of asyncio event loop on top of libuv"
+groups = ["default"]
+marker = "sys_platform != \"win32\""
+files = [
+ {file = "uvloop-0.19.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:de4313d7f575474c8f5a12e163f6d89c0a878bc49219641d49e6f1444369a90e"},
+ {file = "uvloop-0.19.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5588bd21cf1fcf06bded085f37e43ce0e00424197e7c10e77afd4bbefffef428"},
+ {file = "uvloop-0.19.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b1fd71c3843327f3bbc3237bedcdb6504fd50368ab3e04d0410e52ec293f5b8"},
+ {file = "uvloop-0.19.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a05128d315e2912791de6088c34136bfcdd0c7cbc1cf85fd6fd1bb321b7c849"},
+ {file = "uvloop-0.19.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:cd81bdc2b8219cb4b2556eea39d2e36bfa375a2dd021404f90a62e44efaaf957"},
+ {file = "uvloop-0.19.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5f17766fb6da94135526273080f3455a112f82570b2ee5daa64d682387fe0dcd"},
+ {file = "uvloop-0.19.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4ce6b0af8f2729a02a5d1575feacb2a94fc7b2e983868b009d51c9a9d2149bef"},
+ {file = "uvloop-0.19.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:31e672bb38b45abc4f26e273be83b72a0d28d074d5b370fc4dcf4c4eb15417d2"},
+ {file = "uvloop-0.19.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:570fc0ed613883d8d30ee40397b79207eedd2624891692471808a95069a007c1"},
+ {file = "uvloop-0.19.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5138821e40b0c3e6c9478643b4660bd44372ae1e16a322b8fc07478f92684e24"},
+ {file = "uvloop-0.19.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:91ab01c6cd00e39cde50173ba4ec68a1e578fee9279ba64f5221810a9e786533"},
+ {file = "uvloop-0.19.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:47bf3e9312f63684efe283f7342afb414eea4d3011542155c7e625cd799c3b12"},
+ {file = "uvloop-0.19.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:da8435a3bd498419ee8c13c34b89b5005130a476bda1d6ca8cfdde3de35cd650"},
+ {file = "uvloop-0.19.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:02506dc23a5d90e04d4f65c7791e65cf44bd91b37f24cfc3ef6cf2aff05dc7ec"},
+ {file = "uvloop-0.19.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2693049be9d36fef81741fddb3f441673ba12a34a704e7b4361efb75cf30befc"},
+ {file = "uvloop-0.19.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7010271303961c6f0fe37731004335401eb9075a12680738731e9c92ddd96ad6"},
+ {file = "uvloop-0.19.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5daa304d2161d2918fa9a17d5635099a2f78ae5b5960e742b2fcfbb7aefaa593"},
+ {file = "uvloop-0.19.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:7207272c9520203fea9b93843bb775d03e1cf88a80a936ce760f60bb5add92f3"},
+ {file = "uvloop-0.19.0.tar.gz", hash = "sha256:0246f4fd1bf2bf702e06b0d45ee91677ee5c31242f39aab4ea6fe0c51aedd0fd"},
+]
+
+[[package]]
+name = "virtualenv"
+version = "20.26.3"
+requires_python = ">=3.7"
+summary = "Virtual Python Environment builder"
+groups = ["default"]
+dependencies = [
+ "distlib<1,>=0.3.7",
+ "filelock<4,>=3.12.2",
+ "importlib-metadata>=6.6; python_version < \"3.8\"",
+ "platformdirs<5,>=3.9.1",
+]
+files = [
+ {file = "virtualenv-20.26.3-py3-none-any.whl", hash = "sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589"},
+ {file = "virtualenv-20.26.3.tar.gz", hash = "sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a"},
+]
+
+[[package]]
+name = "watchfiles"
+version = "0.23.0"
+requires_python = ">=3.8"
+summary = "Simple, modern and high performance file watching and code reload in python."
+groups = ["default", "docs"]
+dependencies = [
+ "anyio>=3.0.0",
+]
+files = [
+ {file = "watchfiles-0.23.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:bee8ce357a05c20db04f46c22be2d1a2c6a8ed365b325d08af94358e0688eeb4"},
+ {file = "watchfiles-0.23.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4ccd3011cc7ee2f789af9ebe04745436371d36afe610028921cab9f24bb2987b"},
+ {file = "watchfiles-0.23.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb02d41c33be667e6135e6686f1bb76104c88a312a18faa0ef0262b5bf7f1a0f"},
+ {file = "watchfiles-0.23.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7cf12ac34c444362f3261fb3ff548f0037ddd4c5bb85f66c4be30d2936beb3c5"},
+ {file = "watchfiles-0.23.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0b2c25040a3c0ce0e66c7779cc045fdfbbb8d59e5aabfe033000b42fe44b53e"},
+ {file = "watchfiles-0.23.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ecf2be4b9eece4f3da8ba5f244b9e51932ebc441c0867bd6af46a3d97eb068d6"},
+ {file = "watchfiles-0.23.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:40cb8fa00028908211eb9f8d47744dca21a4be6766672e1ff3280bee320436f1"},
+ {file = "watchfiles-0.23.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f48c917ffd36ff9a5212614c2d0d585fa8b064ca7e66206fb5c095015bc8207"},
+ {file = "watchfiles-0.23.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9d183e3888ada88185ab17064079c0db8c17e32023f5c278d7bf8014713b1b5b"},
+ {file = "watchfiles-0.23.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9837edf328b2805346f91209b7e660f65fb0e9ca18b7459d075d58db082bf981"},
+ {file = "watchfiles-0.23.0-cp310-none-win32.whl", hash = "sha256:296e0b29ab0276ca59d82d2da22cbbdb39a23eed94cca69aed274595fb3dfe42"},
+ {file = "watchfiles-0.23.0-cp310-none-win_amd64.whl", hash = "sha256:4ea756e425ab2dfc8ef2a0cb87af8aa7ef7dfc6fc46c6f89bcf382121d4fff75"},
+ {file = "watchfiles-0.23.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:e397b64f7aaf26915bf2ad0f1190f75c855d11eb111cc00f12f97430153c2eab"},
+ {file = "watchfiles-0.23.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b4ac73b02ca1824ec0a7351588241fd3953748d3774694aa7ddb5e8e46aef3e3"},
+ {file = "watchfiles-0.23.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:130a896d53b48a1cecccfa903f37a1d87dbb74295305f865a3e816452f6e49e4"},
+ {file = "watchfiles-0.23.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c5e7803a65eb2d563c73230e9d693c6539e3c975ccfe62526cadde69f3fda0cf"},
+ {file = "watchfiles-0.23.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1aa4cc85202956d1a65c88d18c7b687b8319dbe6b1aec8969784ef7a10e7d1a"},
+ {file = "watchfiles-0.23.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87f889f6e58849ddb7c5d2cb19e2e074917ed1c6e3ceca50405775166492cca8"},
+ {file = "watchfiles-0.23.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:37fd826dac84c6441615aa3f04077adcc5cac7194a021c9f0d69af20fb9fa788"},
+ {file = "watchfiles-0.23.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee7db6e36e7a2c15923072e41ea24d9a0cf39658cb0637ecc9307b09d28827e1"},
+ {file = "watchfiles-0.23.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2368c5371c17fdcb5a2ea71c5c9d49f9b128821bfee69503cc38eae00feb3220"},
+ {file = "watchfiles-0.23.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:857af85d445b9ba9178db95658c219dbd77b71b8264e66836a6eba4fbf49c320"},
+ {file = "watchfiles-0.23.0-cp311-none-win32.whl", hash = "sha256:1d636c8aeb28cdd04a4aa89030c4b48f8b2954d8483e5f989774fa441c0ed57b"},
+ {file = "watchfiles-0.23.0-cp311-none-win_amd64.whl", hash = "sha256:46f1d8069a95885ca529645cdbb05aea5837d799965676e1b2b1f95a4206313e"},
+ {file = "watchfiles-0.23.0-cp311-none-win_arm64.whl", hash = "sha256:e495ed2a7943503766c5d1ff05ae9212dc2ce1c0e30a80d4f0d84889298fa304"},
+ {file = "watchfiles-0.23.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:1db691bad0243aed27c8354b12d60e8e266b75216ae99d33e927ff5238d270b5"},
+ {file = "watchfiles-0.23.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:62d2b18cb1edaba311fbbfe83fb5e53a858ba37cacb01e69bc20553bb70911b8"},
+ {file = "watchfiles-0.23.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e087e8fdf1270d000913c12e6eca44edd02aad3559b3e6b8ef00f0ce76e0636f"},
+ {file = "watchfiles-0.23.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dd41d5c72417b87c00b1b635738f3c283e737d75c5fa5c3e1c60cd03eac3af77"},
+ {file = "watchfiles-0.23.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e5f3ca0ff47940ce0a389457b35d6df601c317c1e1a9615981c474452f98de1"},
+ {file = "watchfiles-0.23.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6991e3a78f642368b8b1b669327eb6751439f9f7eaaa625fae67dd6070ecfa0b"},
+ {file = "watchfiles-0.23.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7f7252f52a09f8fa5435dc82b6af79483118ce6bd51eb74e6269f05ee22a7b9f"},
+ {file = "watchfiles-0.23.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e01bcb8d767c58865207a6c2f2792ad763a0fe1119fb0a430f444f5b02a5ea0"},
+ {file = "watchfiles-0.23.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:8e56fbcdd27fce061854ddec99e015dd779cae186eb36b14471fc9ae713b118c"},
+ {file = "watchfiles-0.23.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bd3e2d64500a6cad28bcd710ee6269fbeb2e5320525acd0cfab5f269ade68581"},
+ {file = "watchfiles-0.23.0-cp312-none-win32.whl", hash = "sha256:eb99c954291b2fad0eff98b490aa641e128fbc4a03b11c8a0086de8b7077fb75"},
+ {file = "watchfiles-0.23.0-cp312-none-win_amd64.whl", hash = "sha256:dccc858372a56080332ea89b78cfb18efb945da858fabeb67f5a44fa0bcb4ebb"},
+ {file = "watchfiles-0.23.0-cp312-none-win_arm64.whl", hash = "sha256:6c21a5467f35c61eafb4e394303720893066897fca937bade5b4f5877d350ff8"},
+ {file = "watchfiles-0.23.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:ba31c32f6b4dceeb2be04f717811565159617e28d61a60bb616b6442027fd4b9"},
+ {file = "watchfiles-0.23.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:85042ab91814fca99cec4678fc063fb46df4cbb57b4835a1cc2cb7a51e10250e"},
+ {file = "watchfiles-0.23.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:24655e8c1c9c114005c3868a3d432c8aa595a786b8493500071e6a52f3d09217"},
+ {file = "watchfiles-0.23.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6b1a950ab299a4a78fd6369a97b8763732bfb154fdb433356ec55a5bce9515c1"},
+ {file = "watchfiles-0.23.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b8d3c5cd327dd6ce0edfc94374fb5883d254fe78a5e9d9dfc237a1897dc73cd1"},
+ {file = "watchfiles-0.23.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ff785af8bacdf0be863ec0c428e3288b817e82f3d0c1d652cd9c6d509020dd0"},
+ {file = "watchfiles-0.23.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:02b7ba9d4557149410747353e7325010d48edcfe9d609a85cb450f17fd50dc3d"},
+ {file = "watchfiles-0.23.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48a1b05c0afb2cd2f48c1ed2ae5487b116e34b93b13074ed3c22ad5c743109f0"},
+ {file = "watchfiles-0.23.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:109a61763e7318d9f821b878589e71229f97366fa6a5c7720687d367f3ab9eef"},
+ {file = "watchfiles-0.23.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:9f8e6bb5ac007d4a4027b25f09827ed78cbbd5b9700fd6c54429278dacce05d1"},
+ {file = "watchfiles-0.23.0-cp313-none-win32.whl", hash = "sha256:f46c6f0aec8d02a52d97a583782d9af38c19a29900747eb048af358a9c1d8e5b"},
+ {file = "watchfiles-0.23.0-cp313-none-win_amd64.whl", hash = "sha256:f449afbb971df5c6faeb0a27bca0427d7b600dd8f4a068492faec18023f0dcff"},
+ {file = "watchfiles-0.23.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:6a9265cf87a5b70147bfb2fec14770ed5b11a5bb83353f0eee1c25a81af5abfe"},
+ {file = "watchfiles-0.23.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9f02a259fcbbb5fcfe7a0805b1097ead5ba7a043e318eef1db59f93067f0b49b"},
+ {file = "watchfiles-0.23.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ebaebb53b34690da0936c256c1cdb0914f24fb0e03da76d185806df9328abed"},
+ {file = "watchfiles-0.23.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd257f98cff9c6cb39eee1a83c7c3183970d8a8d23e8cf4f47d9a21329285cee"},
+ {file = "watchfiles-0.23.0.tar.gz", hash = "sha256:9338ade39ff24f8086bb005d16c29f8e9f19e55b18dcb04dfa26fcbc09da497b"},
+]
+
+[[package]]
+name = "websockets"
+version = "12.0"
+requires_python = ">=3.8"
+summary = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)"
+groups = ["default", "docs"]
+files = [
+ {file = "websockets-12.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d554236b2a2006e0ce16315c16eaa0d628dab009c33b63ea03f41c6107958374"},
+ {file = "websockets-12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2d225bb6886591b1746b17c0573e29804619c8f755b5598d875bb4235ea639be"},
+ {file = "websockets-12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:eb809e816916a3b210bed3c82fb88eaf16e8afcf9c115ebb2bacede1797d2547"},
+ {file = "websockets-12.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c588f6abc13f78a67044c6b1273a99e1cf31038ad51815b3b016ce699f0d75c2"},
+ {file = "websockets-12.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5aa9348186d79a5f232115ed3fa9020eab66d6c3437d72f9d2c8ac0c6858c558"},
+ {file = "websockets-12.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6350b14a40c95ddd53e775dbdbbbc59b124a5c8ecd6fbb09c2e52029f7a9f480"},
+ {file = "websockets-12.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:70ec754cc2a769bcd218ed8d7209055667b30860ffecb8633a834dde27d6307c"},
+ {file = "websockets-12.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6e96f5ed1b83a8ddb07909b45bd94833b0710f738115751cdaa9da1fb0cb66e8"},
+ {file = "websockets-12.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4d87be612cbef86f994178d5186add3d94e9f31cc3cb499a0482b866ec477603"},
+ {file = "websockets-12.0-cp310-cp310-win32.whl", hash = "sha256:befe90632d66caaf72e8b2ed4d7f02b348913813c8b0a32fae1cc5fe3730902f"},
+ {file = "websockets-12.0-cp310-cp310-win_amd64.whl", hash = "sha256:363f57ca8bc8576195d0540c648aa58ac18cf85b76ad5202b9f976918f4219cf"},
+ {file = "websockets-12.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5d873c7de42dea355d73f170be0f23788cf3fa9f7bed718fd2830eefedce01b4"},
+ {file = "websockets-12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3f61726cae9f65b872502ff3c1496abc93ffbe31b278455c418492016e2afc8f"},
+ {file = "websockets-12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed2fcf7a07334c77fc8a230755c2209223a7cc44fc27597729b8ef5425aa61a3"},
+ {file = "websockets-12.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e332c210b14b57904869ca9f9bf4ca32f5427a03eeb625da9b616c85a3a506c"},
+ {file = "websockets-12.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5693ef74233122f8ebab026817b1b37fe25c411ecfca084b29bc7d6efc548f45"},
+ {file = "websockets-12.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e9e7db18b4539a29cc5ad8c8b252738a30e2b13f033c2d6e9d0549b45841c04"},
+ {file = "websockets-12.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6e2df67b8014767d0f785baa98393725739287684b9f8d8a1001eb2839031447"},
+ {file = "websockets-12.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bea88d71630c5900690fcb03161ab18f8f244805c59e2e0dc4ffadae0a7ee0ca"},
+ {file = "websockets-12.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dff6cdf35e31d1315790149fee351f9e52978130cef6c87c4b6c9b3baf78bc53"},
+ {file = "websockets-12.0-cp311-cp311-win32.whl", hash = "sha256:3e3aa8c468af01d70332a382350ee95f6986db479ce7af14d5e81ec52aa2b402"},
+ {file = "websockets-12.0-cp311-cp311-win_amd64.whl", hash = "sha256:25eb766c8ad27da0f79420b2af4b85d29914ba0edf69f547cc4f06ca6f1d403b"},
+ {file = "websockets-12.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0e6e2711d5a8e6e482cacb927a49a3d432345dfe7dea8ace7b5790df5932e4df"},
+ {file = "websockets-12.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:dbcf72a37f0b3316e993e13ecf32f10c0e1259c28ffd0a85cee26e8549595fbc"},
+ {file = "websockets-12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12743ab88ab2af1d17dd4acb4645677cb7063ef4db93abffbf164218a5d54c6b"},
+ {file = "websockets-12.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b645f491f3c48d3f8a00d1fce07445fab7347fec54a3e65f0725d730d5b99cb"},
+ {file = "websockets-12.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9893d1aa45a7f8b3bc4510f6ccf8db8c3b62120917af15e3de247f0780294b92"},
+ {file = "websockets-12.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f38a7b376117ef7aff996e737583172bdf535932c9ca021746573bce40165ed"},
+ {file = "websockets-12.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f764ba54e33daf20e167915edc443b6f88956f37fb606449b4a5b10ba42235a5"},
+ {file = "websockets-12.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:1e4b3f8ea6a9cfa8be8484c9221ec0257508e3a1ec43c36acdefb2a9c3b00aa2"},
+ {file = "websockets-12.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9fdf06fd06c32205a07e47328ab49c40fc1407cdec801d698a7c41167ea45113"},
+ {file = "websockets-12.0-cp312-cp312-win32.whl", hash = "sha256:baa386875b70cbd81798fa9f71be689c1bf484f65fd6fb08d051a0ee4e79924d"},
+ {file = "websockets-12.0-cp312-cp312-win_amd64.whl", hash = "sha256:ae0a5da8f35a5be197f328d4727dbcfafa53d1824fac3d96cdd3a642fe09394f"},
+ {file = "websockets-12.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:248d8e2446e13c1d4326e0a6a4e9629cb13a11195051a73acf414812700badbd"},
+ {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f44069528d45a933997a6fef143030d8ca8042f0dfaad753e2906398290e2870"},
+ {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c4e37d36f0d19f0a4413d3e18c0d03d0c268ada2061868c1e6f5ab1a6d575077"},
+ {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d829f975fc2e527a3ef2f9c8f25e553eb7bc779c6665e8e1d52aa22800bb38b"},
+ {file = "websockets-12.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2c71bd45a777433dd9113847af751aae36e448bc6b8c361a566cb043eda6ec30"},
+ {file = "websockets-12.0-py3-none-any.whl", hash = "sha256:dc284bbc8d7c78a6c69e0c7325ab46ee5e40bb4d50e494d8131a07ef47500e9e"},
+ {file = "websockets-12.0.tar.gz", hash = "sha256:81df9cbcbb6c260de1e007e58c011bfebe2dafc8435107b0537f393dd38c8b1b"},
+]
+
+[[package]]
+name = "wrapt"
+version = "1.16.0"
+requires_python = ">=3.6"
+summary = "Module for decorators, wrappers and monkey patching."
+groups = ["default"]
+files = [
+ {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"},
+ {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"},
+ {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"},
+ {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"},
+ {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"},
+ {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"},
+ {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"},
+ {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"},
+ {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"},
+ {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"},
+ {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"},
+ {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"},
+ {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"},
+ {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"},
+ {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"},
+ {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"},
+ {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"},
+ {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"},
+ {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"},
+ {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"},
+ {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"},
+ {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"},
+ {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"},
+ {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"},
+ {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"},
+ {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"},
+ {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"},
+ {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"},
+ {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"},
+ {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"},
+ {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"},
+ {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"},
+]
+
+[[package]]
+name = "yarl"
+version = "1.9.4"
+requires_python = ">=3.7"
+summary = "Yet another URL library"
+groups = ["default"]
+dependencies = [
+ "idna>=2.0",
+ "multidict>=4.0",
+ "typing-extensions>=3.7.4; python_version < \"3.8\"",
+]
+files = [
+ {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e"},
+ {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2"},
+ {file = "yarl-1.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c38c9ddb6103ceae4e4498f9c08fac9b590c5c71b0370f98714768e22ac6fa66"},
+ {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9e09c9d74f4566e905a0b8fa668c58109f7624db96a2171f21747abc7524234"},
+ {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8477c1ee4bd47c57d49621a062121c3023609f7a13b8a46953eb6c9716ca392"},
+ {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5ff2c858f5f6a42c2a8e751100f237c5e869cbde669a724f2062d4c4ef93551"},
+ {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:357495293086c5b6d34ca9616a43d329317feab7917518bc97a08f9e55648455"},
+ {file = "yarl-1.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54525ae423d7b7a8ee81ba189f131054defdb122cde31ff17477951464c1691c"},
+ {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:801e9264d19643548651b9db361ce3287176671fb0117f96b5ac0ee1c3530d53"},
+ {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e516dc8baf7b380e6c1c26792610230f37147bb754d6426462ab115a02944385"},
+ {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7d5aaac37d19b2904bb9dfe12cdb08c8443e7ba7d2852894ad448d4b8f442863"},
+ {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:54beabb809ffcacbd9d28ac57b0db46e42a6e341a030293fb3185c409e626b8b"},
+ {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bac8d525a8dbc2a1507ec731d2867025d11ceadcb4dd421423a5d42c56818541"},
+ {file = "yarl-1.9.4-cp310-cp310-win32.whl", hash = "sha256:7855426dfbddac81896b6e533ebefc0af2f132d4a47340cee6d22cac7190022d"},
+ {file = "yarl-1.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:848cd2a1df56ddbffeb375535fb62c9d1645dde33ca4d51341378b3f5954429b"},
+ {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:35a2b9396879ce32754bd457d31a51ff0a9d426fd9e0e3c33394bf4b9036b099"},
+ {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c7d56b293cc071e82532f70adcbd8b61909eec973ae9d2d1f9b233f3d943f2c"},
+ {file = "yarl-1.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8a1c6c0be645c745a081c192e747c5de06e944a0d21245f4cf7c05e457c36e0"},
+ {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b3c1ffe10069f655ea2d731808e76e0f452fc6c749bea04781daf18e6039525"},
+ {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:549d19c84c55d11687ddbd47eeb348a89df9cb30e1993f1b128f4685cd0ebbf8"},
+ {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7409f968456111140c1c95301cadf071bd30a81cbd7ab829169fb9e3d72eae9"},
+ {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e23a6d84d9d1738dbc6e38167776107e63307dfc8ad108e580548d1f2c587f42"},
+ {file = "yarl-1.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8b889777de69897406c9fb0b76cdf2fd0f31267861ae7501d93003d55f54fbe"},
+ {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:03caa9507d3d3c83bca08650678e25364e1843b484f19986a527630ca376ecce"},
+ {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e9035df8d0880b2f1c7f5031f33f69e071dfe72ee9310cfc76f7b605958ceb9"},
+ {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:c0ec0ed476f77db9fb29bca17f0a8fcc7bc97ad4c6c1d8959c507decb22e8572"},
+ {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:ee04010f26d5102399bd17f8df8bc38dc7ccd7701dc77f4a68c5b8d733406958"},
+ {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:49a180c2e0743d5d6e0b4d1a9e5f633c62eca3f8a86ba5dd3c471060e352ca98"},
+ {file = "yarl-1.9.4-cp311-cp311-win32.whl", hash = "sha256:81eb57278deb6098a5b62e88ad8281b2ba09f2f1147c4767522353eaa6260b31"},
+ {file = "yarl-1.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d1d2532b340b692880261c15aee4dc94dd22ca5d61b9db9a8a361953d36410b1"},
+ {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0d2454f0aef65ea81037759be5ca9947539667eecebca092733b2eb43c965a81"},
+ {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:44d8ffbb9c06e5a7f529f38f53eda23e50d1ed33c6c869e01481d3fafa6b8142"},
+ {file = "yarl-1.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aaaea1e536f98754a6e5c56091baa1b6ce2f2700cc4a00b0d49eca8dea471074"},
+ {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3777ce5536d17989c91696db1d459574e9a9bd37660ea7ee4d3344579bb6f129"},
+ {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fc5fc1eeb029757349ad26bbc5880557389a03fa6ada41703db5e068881e5f2"},
+ {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea65804b5dc88dacd4a40279af0cdadcfe74b3e5b4c897aa0d81cf86927fee78"},
+ {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa102d6d280a5455ad6a0f9e6d769989638718e938a6a0a2ff3f4a7ff8c62cc4"},
+ {file = "yarl-1.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09efe4615ada057ba2d30df871d2f668af661e971dfeedf0c159927d48bbeff0"},
+ {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:008d3e808d03ef28542372d01057fd09168419cdc8f848efe2804f894ae03e51"},
+ {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6f5cb257bc2ec58f437da2b37a8cd48f666db96d47b8a3115c29f316313654ff"},
+ {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:992f18e0ea248ee03b5a6e8b3b4738850ae7dbb172cc41c966462801cbf62cf7"},
+ {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0e9d124c191d5b881060a9e5060627694c3bdd1fe24c5eecc8d5d7d0eb6faabc"},
+ {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3986b6f41ad22988e53d5778f91855dc0399b043fc8946d4f2e68af22ee9ff10"},
+ {file = "yarl-1.9.4-cp312-cp312-win32.whl", hash = "sha256:4b21516d181cd77ebd06ce160ef8cc2a5e9ad35fb1c5930882baff5ac865eee7"},
+ {file = "yarl-1.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:a9bd00dc3bc395a662900f33f74feb3e757429e545d831eef5bb280252631984"},
+ {file = "yarl-1.9.4-py3-none-any.whl", hash = "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad"},
+ {file = "yarl-1.9.4.tar.gz", hash = "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf"},
+]
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000..e342404
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,151 @@
+[project]
+name = "eos"
+version = "0.3.0"
+description = "The Experiment Orchestration System (EOS) is a comprehensive software framework and runtime for laboratory automation."
+keywords = ["automation", "science", "lab", "experiment", "orchestration", "distributed", "infrastructure"]
+authors = [
+ { name = "Angelos Angelopoulos", email = "aangelos@cs.unc.edu" }
+]
+license = { text = "BSD 3-Clause" }
+classifiers = [
+ "Development Status :: 3 - Alpha",
+ "Natural Language :: English",
+ "Operating System :: OS Independent",
+ "License :: OSI Approved :: BSD License",
+ "Programming Language :: Python :: 3.10",
+ "Programming Language :: Python :: 3.11",
+ "Programming Language :: Python :: 3.12",
+ "Programming Language :: Python",
+ "Typing :: Typed",
+ "Intended Audience :: Science/Research",
+]
+readme = "README.md"
+requires-python = ">=3.10"
+dependencies = [
+ "ray[default]~=2.35.0",
+ "typer~=0.12.5",
+ "rich~=13.8.1",
+ "omegaconf~=2.3.0",
+ "jinja2~=3.1.4",
+ "PyYAML~=6.0.2",
+ "networkx~=3.3.0",
+ "pymongo~=4.8.0",
+ "pydantic~=2.9.1",
+ "bofire[optimization]~=0.0.13",
+ "pandas~=2.2.2",
+ "numpy~=1.26.2",
+ "litestar[standard]~=2.11.0",
+ "minio~=7.2.8",
+]
+
+[project.optional-dependencies]
+dev = [
+ "ruff",
+ "pytest",
+ "pytest-cov",
+ "pytest-asyncio",
+ "black",
+]
+docs = [
+ "sphinx",
+ "sphinx-autobuild",
+ "sphinx-copybutton",
+ "sphinx-design",
+ "sphinx-click",
+ "pydata-sphinx-theme",
+]
+
+[build-system]
+requires = ["pdm-backend"]
+build-backend = "pdm.backend"
+
+[tool.pdm.build]
+includes = ["eos"]
+
+[tool.pdm.scripts]
+test = "pytest"
+test-with-cov = "pytest --cov=eos"
+cov-report = "coverage html"
+lint = "ruff check eos tests"
+format = "black ."
+docs-build = "sphinx-build docs docs/_build"
+docs-build-gh = { shell = "sphinx-build docs docs/_build && touch docs/_build/.nojekyll" }
+docs-serve = "sphinx-autobuild docs docs/_build/ -j auto --watch eos --watch docs --port 8002"
+
+[project.scripts]
+eos = "eos.eos:eos_app"
+
+[tool.black]
+line-length = 120
+
+[tool.pytest.ini_options]
+testpaths = [
+ "tests",
+]
+
+[tool.ruff]
+include = [
+ "{eos,tests}/**/*.py",
+ "pyproject.toml"
+]
+target-version = "py310"
+line-length = 120
+
+lint.mccabe.max-complexity = 14
+lint.isort.known-first-party = ["eos", "tests"]
+
+lint.select = [
+ "A", # flake8-builtins
+ "ANN", # flake8-annotations
+ "ASYNC", # flake8-async
+ "B", # flake8-bugbear
+ "C4", # flake8-comprehensions
+ "C90", # mccabe
+ "DTZ", # flake8-datetimez
+ "E", # pycodestyle errors
+ "ERA", # eradicate
+ "EXE", # flake8-executable
+ "F", # pyflakes
+ "G", # flake8-logging-format
+ "I", # isort
+ "ICN", # flake8-import-conventions
+ "ISC", # flake8-implicit-str-concat
+ "N", # pep8-naming
+ "PIE", # flake8-pie
+ "PLC", # pylint - convention
+ "PT", # flake8-pytest
+ "PLE", # pylint - error
+ "PLR", # pylint - refactor
+ "PLW", # pylint - warning
+ "PTH", # flake8-use-pathlib
+ "Q", # flake8-quotes
+ "RET", # flake8-return
+ "RUF", # Ruff-specific rules
+ "S", # flake8-bandit
+ "SIM", # flake8-simplify
+ "T10", # flake8-debugger
+ "T20", # flake8-print
+ "TCH", # flake8-type-checking
+ "TID", # flake8-tidy-imports
+ "UP", # pyupgrade
+ "W", # pycodestyle - warning
+ "YTT", # flake8-2020
+]
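+# I001 (unsorted imports) is ignored even though the isort rules are selected;
+# the skipped ANN codes cover argument (ANN001-003), self (ANN101), special-method
+# return (ANN204), and Any (ANN401) annotations.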
+lint.ignore = ["I001", "ANN001", "ANN002", "ANN003", "ANN101", "ANN204", "ANN401"]
+
+[tool.ruff.lint.per-file-ignores]
+"tests/**/*.*" = [
+ "S",
+ "S101",
+ "I001",
+ "F405",
+ "F403",
+ "T201",
+ "D",
+ "ANN",
+ "PT001",
+ "PT004",
+ "PT023",
+ "PLR0913",
+ "PLR2004",
+]
diff --git a/tests/__init__.py b/tests/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/fixtures.py b/tests/fixtures.py
new file mode 100644
index 0000000..95d0aad
--- /dev/null
+++ b/tests/fixtures.py
@@ -0,0 +1,292 @@
+import os
+from pathlib import Path
+
+import pytest
+import ray
+import yaml
+
+from eos.campaigns.campaign_executor import CampaignExecutor
+from eos.campaigns.campaign_manager import CampaignManager
+from eos.campaigns.campaign_optimizer_manager import CampaignOptimizerManager
+from eos.campaigns.entities.campaign import CampaignExecutionParameters
+from eos.configuration.configuration_manager import ConfigurationManager
+from eos.configuration.experiment_graph.experiment_graph import ExperimentGraph
+from eos.containers.container_manager import ContainerManager
+from eos.devices.device_manager import DeviceManager
+from eos.experiments.entities.experiment import ExperimentExecutionParameters
+from eos.experiments.experiment_executor import ExperimentExecutor
+from eos.experiments.experiment_executor_factory import ExperimentExecutorFactory
+from eos.experiments.experiment_manager import ExperimentManager
+from eos.logging.logger import log
+from eos.persistence.db_manager import DbManager
+from eos.persistence.file_db_manager import FileDbManager
+from eos.persistence.service_credentials import ServiceCredentials
+from eos.resource_allocation.container_allocation_manager import ContainerAllocationManager
+from eos.resource_allocation.device_allocation_manager import DeviceAllocationManager
+from eos.resource_allocation.resource_allocation_manager import (
+ ResourceAllocationManager,
+)
+from eos.scheduling.basic_scheduler import BasicScheduler
+from eos.tasks.task_executor import TaskExecutor
+from eos.tasks.task_manager import TaskManager
+
+log.set_level("INFO")
+
+
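+# Loads tests/test_config.yaml and returns the section named by config_name,
+# failing fast if the file or the key is missing.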
+def load_test_config(config_name):
+ config_path = Path(__file__).resolve().parent / "test_config.yaml"
+
+ if not config_path.exists():
+ raise FileNotFoundError(f"Test config file not found at {config_path}")
+
+ with config_path.open("r") as file:
+ config = yaml.safe_load(file)
+
+ if config_name not in config:
+ raise KeyError(f"Config key {config_name} not found in test config file")
+
+ return config.get(config_name)
+
+
+@pytest.fixture(scope="session")
+def configuration_manager():
+ config = load_test_config("configuration_manager")
+ root_dir = Path(__file__).resolve().parent.parent
+ user_dir = root_dir / config["user_dir"]
+ os.chdir(root_dir)
+ return ConfigurationManager(user_dir=str(user_dir))
+
+
+@pytest.fixture(scope="session")
+def task_specification_registry(configuration_manager):
+ return configuration_manager.task_specs
+
+
+@pytest.fixture
+def user_dir():
+ config = load_test_config("configuration_manager")
+ root_dir = Path(__file__).resolve().parent.parent
+ return root_dir / config["user_dir"]
+
+
+@pytest.fixture(scope="session")
+def db_manager():
+ config = load_test_config("db_manager")
+
+ db_credentials_config = config["db_credentials"]
+ db_credentials = ServiceCredentials(
+ host=db_credentials_config["host"],
+ port=db_credentials_config["port"],
+ username=db_credentials_config["username"],
+ password=db_credentials_config["password"],
+ )
+
+ return DbManager(db_credentials, "test-eos")
+
+
+@pytest.fixture(scope="session")
+def file_db_manager(db_manager):
+ config = load_test_config("file_db_manager")
+
+ file_db_credentials_config = config["file_db_credentials"]
+ file_db_credentials = ServiceCredentials(
+ host=file_db_credentials_config["host"],
+ port=file_db_credentials_config["port"],
+ username=file_db_credentials_config["username"],
+ password=file_db_credentials_config["password"],
+ )
+
+ return FileDbManager(file_db_credentials, bucket_name="test-eos")
+
+
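+# Indirectly parametrized with a (lab_name, experiment_name) tuple; loads both
+# configurations on demand and returns (lab_config, experiment_config).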
+@pytest.fixture
+def setup_lab_experiment(request, configuration_manager, db_manager):
+ lab_name, experiment_name = request.param
+
+ if lab_name not in configuration_manager.labs:
+ configuration_manager.load_lab(lab_name)
+ lab_config = configuration_manager.labs[lab_name]
+
+ if experiment_name not in configuration_manager.experiments:
+ configuration_manager.load_experiment(experiment_name)
+ experiment_config = configuration_manager.experiments[experiment_name]
+
+ return lab_config, experiment_config
+
+
+@pytest.fixture
+def experiment_graph(setup_lab_experiment):
+ _, experiment_config = setup_lab_experiment
+
+ return ExperimentGraph(
+ experiment_config,
+ )
+
+
+@pytest.fixture
+def clean_db(db_manager):
+ print("Cleaned up DB.")
+ db_manager.clean_db()
+
+
+@pytest.fixture
+def container_manager(setup_lab_experiment, configuration_manager, db_manager, clean_db):
+ return ContainerManager(configuration_manager, db_manager)
+
+
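+# Creates device actors for every loaded lab and tears them down after the test,
+# so each test starts with a fresh set of devices on the Ray cluster.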
+@pytest.fixture
+def device_manager(setup_lab_experiment, configuration_manager, db_manager, ray_cluster, clean_db):
+ device_manager = DeviceManager(configuration_manager, db_manager)
+ device_manager.update_devices(loaded_labs=set(configuration_manager.labs.keys()))
+ yield device_manager
+ device_manager.cleanup_device_actors()
+
+
+@pytest.fixture
+def experiment_manager(setup_lab_experiment, configuration_manager, db_manager, clean_db):
+ return ExperimentManager(configuration_manager, db_manager)
+
+
+@pytest.fixture
+def container_allocator(setup_lab_experiment, configuration_manager, db_manager, clean_db):
+ return ContainerAllocationManager(configuration_manager, db_manager)
+
+
+@pytest.fixture
+def device_allocator(setup_lab_experiment, configuration_manager, db_manager, clean_db):
+ return DeviceAllocationManager(configuration_manager, db_manager)
+
+
+@pytest.fixture
+def resource_allocation_manager(setup_lab_experiment, configuration_manager, db_manager, clean_db):
+ return ResourceAllocationManager(configuration_manager, db_manager)
+
+
+@pytest.fixture
+def task_manager(setup_lab_experiment, configuration_manager, db_manager, file_db_manager, clean_db):
+ return TaskManager(configuration_manager, db_manager, file_db_manager)
+
+
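+# One Ray cluster shared per test module; the custom "eos-core" resource is
+# advertised for any actors that request it.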
+@pytest.fixture(scope="module")
+def ray_cluster():
+ ray.init(namespace="test-eos", ignore_reinit_error=True, resources={"eos-core": 1})
+ yield
+ ray.shutdown()
+
+
+@pytest.fixture
+def task_executor(
+ setup_lab_experiment,
+ task_manager,
+ device_manager,
+ container_manager,
+ resource_allocation_manager,
+ configuration_manager,
+):
+ return TaskExecutor(
+ task_manager, device_manager, container_manager, resource_allocation_manager, configuration_manager
+ )
+
+
+@pytest.fixture
+def basic_scheduler(
+ setup_lab_experiment,
+ configuration_manager,
+ experiment_manager,
+ task_manager,
+ device_manager,
+ resource_allocation_manager,
+):
+ return BasicScheduler(
+ configuration_manager, experiment_manager, task_manager, device_manager, resource_allocation_manager
+ )
+
+
+@pytest.fixture
+def experiment_executor(
+ request,
+ experiment_manager,
+ task_manager,
+ container_manager,
+ task_executor,
+ basic_scheduler,
+ experiment_graph,
+):
+ experiment_id, experiment_type = request.param
+
+ return ExperimentExecutor(
+ experiment_id=experiment_id,
+ experiment_type=experiment_type,
+ execution_parameters=ExperimentExecutionParameters(),
+ experiment_graph=experiment_graph,
+ experiment_manager=experiment_manager,
+ task_manager=task_manager,
+ container_manager=container_manager,
+ task_executor=task_executor,
+ scheduler=basic_scheduler,
+ )
+
+
+@pytest.fixture
+def experiment_executor_factory(
+ configuration_manager,
+ experiment_manager,
+ task_manager,
+ container_manager,
+ task_executor,
+ basic_scheduler,
+):
+ return ExperimentExecutorFactory(
+ configuration_manager=configuration_manager,
+ experiment_manager=experiment_manager,
+ task_manager=task_manager,
+ container_manager=container_manager,
+ task_executor=task_executor,
+ scheduler=basic_scheduler,
+ )
+
+
+@pytest.fixture
+def campaign_manager(
+ configuration_manager,
+ db_manager,
+):
+ return CampaignManager(configuration_manager, db_manager)
+
+
+@pytest.fixture
+def campaign_optimizer_manager(
+ db_manager,
+):
+ return CampaignOptimizerManager(db_manager)
+
+
+@pytest.fixture
+def campaign_executor(
+ request,
+ configuration_manager,
+ campaign_manager,
+ campaign_optimizer_manager,
+ task_manager,
+ experiment_executor_factory,
+):
+ campaign_id, experiment_type, max_experiments, do_optimization = request.param
+
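+ # Run the campaign optimizer on the local machine during tests.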
+ optimizer_computer_ip = "127.0.0.1"
+
+ execution_parameters = CampaignExecutionParameters(
+ max_experiments=max_experiments,
+ max_concurrent_experiments=1,
+ do_optimization=do_optimization,
+ optimizer_computer_ip=optimizer_computer_ip,
+ )
+
+ return CampaignExecutor(
+ campaign_id=campaign_id,
+ experiment_type=experiment_type,
+ campaign_manager=campaign_manager,
+ campaign_optimizer_manager=campaign_optimizer_manager,
+ task_manager=task_manager,
+ experiment_executor_factory=experiment_executor_factory,
+ execution_parameters=execution_parameters,
+ )
diff --git a/tests/test_basic_scheduler.py b/tests/test_basic_scheduler.py
new file mode 100644
index 0000000..d40b045
--- /dev/null
+++ b/tests/test_basic_scheduler.py
@@ -0,0 +1,73 @@
+from tests.fixtures import *
+
+
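+# Overrides the experiment_graph fixture from tests.fixtures so this module always
+# builds the graph for abstract_experiment; the unused basic_scheduler dependency
+# appears to exist only to force the scheduler to be constructed first.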
+@pytest.fixture()
+def experiment_graph(configuration_manager, basic_scheduler):
+ experiment = configuration_manager.experiments["abstract_experiment"]
+ return ExperimentGraph(experiment)
+
+
+@pytest.mark.parametrize("setup_lab_experiment", [("abstract_lab", "abstract_experiment")], indirect=True)
+class TestBasicScheduler:
+ def test_register_experiment(self, basic_scheduler, experiment_graph, configuration_manager):
+ print(configuration_manager.device_specs)
+ basic_scheduler.register_experiment("experiment_1", "abstract_experiment", experiment_graph)
+ assert basic_scheduler._registered_experiments["experiment_1"] == (
+ "abstract_experiment",
+ experiment_graph,
+ )
+
+ def test_unregister_experiment(self, basic_scheduler, experiment_graph):
+ basic_scheduler.register_experiment("experiment_1", "abstract_experiment", experiment_graph)
+ basic_scheduler.unregister_experiment("experiment_1")
+ assert "experiment_1" not in basic_scheduler._registered_experiments
+
+ @pytest.mark.asyncio
+ async def test_correct_schedule(self, basic_scheduler, experiment_graph, experiment_manager, task_manager):
+ def complete_task(task_id, task_type):
+ task_manager.create_task("experiment_1", task_id, task_type, [])
+ task_manager.start_task("experiment_1", task_id)
+ task_manager.complete_task("experiment_1", task_id)
+
+ def get_task_if_exists(tasks, task_id):
+ return next((task for task in tasks if task.id == task_id), None)
+
+ def assert_task(task, task_id, device_lab_id, device_id):
+ assert task.id == task_id
+ assert task.devices[0].lab_id == device_lab_id
+ assert task.devices[0].id == device_id
+
+ def process_and_assert(tasks, expected_tasks):
+ assert len(tasks) == len(expected_tasks)
+ for task_id, device_lab_id, device_id in expected_tasks:
+ task = get_task_if_exists(tasks, task_id)
+ assert_task(task, task_id, device_lab_id, device_id)
+ complete_task(task_id, "Noop")
+
+ experiment_manager.create_experiment("experiment_1", "abstract_experiment")
+ experiment_manager.start_experiment("experiment_1")
+ basic_scheduler.register_experiment("experiment_1", "abstract_experiment", experiment_graph)
+
+ tasks = await basic_scheduler.request_tasks("experiment_1")
+ process_and_assert(tasks, [("A", "abstract_lab", "D2")])
+
+ tasks = await basic_scheduler.request_tasks("experiment_1")
+ process_and_assert(tasks, [("B", "abstract_lab", "D1"), ("C", "abstract_lab", "D3")])
+
+ tasks = await basic_scheduler.request_tasks("experiment_1")
+ process_and_assert(
+ tasks,
+ [("D", "abstract_lab", "D1"), ("E", "abstract_lab", "D3"), ("F", "abstract_lab", "D2")],
+ )
+
+ tasks = await basic_scheduler.request_tasks("experiment_1")
+ process_and_assert(tasks, [("G", "abstract_lab", "D5")])
+
+ tasks = await basic_scheduler.request_tasks("experiment_1")
+ process_and_assert(tasks, [("H", "abstract_lab", "D6")])
+
+ assert basic_scheduler.is_experiment_completed("experiment_1")
+
+ tasks = await basic_scheduler.request_tasks("experiment_1")
+ assert len(tasks) == 0
+ experiment_manager.complete_experiment("experiment_1")
diff --git a/tests/test_bayesian_sequential_optimizer.py b/tests/test_bayesian_sequential_optimizer.py
new file mode 100644
index 0000000..73632ea
--- /dev/null
+++ b/tests/test_bayesian_sequential_optimizer.py
@@ -0,0 +1,78 @@
+import pandas as pd
+from bofire.data_models.acquisition_functions.acquisition_function import qNEI, qNEHVI
+from bofire.data_models.enum import SamplingMethodEnum
+from bofire.data_models.features.continuous import ContinuousInput, ContinuousOutput
+from bofire.data_models.objectives.identity import MaximizeObjective, MinimizeObjective
+
+from eos.optimization.sequential_bayesian_optimizer import BayesianSequentialOptimizer
+
+
+class TestCampaignBayesianOptimizer:
+ def test_single_objective_optimization(self):
+ optimizer = BayesianSequentialOptimizer(
+ inputs=[
+ ContinuousInput(key="x", bounds=(0, 7)),
+ ],
+ outputs=[ContinuousOutput(key="y", objective=MaximizeObjective(w=1.0))],
+ constraints=[],
+ acquisition_function=qNEI(),
+ num_initial_samples=5,
+ initial_sampling_method=SamplingMethodEnum.SOBOL,
+ )
+
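+ # Ground truth is y = -(x - 2)^2 + 4, a parabola peaking at y = 4 when x = 2,
+ # so the optimizer should converge near that maximum within the eight iterations.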
+ for _ in range(8):
+ parameters = optimizer.sample()
+ results = pd.DataFrame()
+ results["y"] = -((parameters["x"] - 2) ** 2) + 4
+ optimizer.report(parameters, results)
+
+ optimal_solutions = optimizer.get_optimal_solutions()
+ assert len(optimal_solutions) == 1
+ assert abs(optimal_solutions["y"].to_numpy()[0] - 4) < 0.01
+
+ def test_competing_multi_objective_optimization(self):
+ optimizer = BayesianSequentialOptimizer(
+ inputs=[
+ ContinuousInput(key="x", bounds=(0, 7)),
+ ],
+ outputs=[
+ ContinuousOutput(key="y1", objective=MaximizeObjective(w=1.0)),
+ ContinuousOutput(key="y2", objective=MinimizeObjective(w=1.0)),
+ ],
+ constraints=[],
+ acquisition_function=qNEHVI(),
+ num_initial_samples=10,
+ initial_sampling_method=SamplingMethodEnum.SOBOL,
+ )
+
+ for _ in range(30):
+ parameters = optimizer.sample()
+ results = pd.DataFrame()
+ results["y1"] = -((parameters["x"] - 2) ** 2) + 4 # Objective 1: Maximize y1
+ results["y2"] = (parameters["x"] - 5) ** 2 # Objective 2: Minimize y2
+ optimizer.report(parameters, results)
+
+ optimal_solutions = optimizer.get_optimal_solutions()
+ print()
+ pd.set_option("display.max_rows", None, "display.max_columns", None)
+ print(optimal_solutions)
+
+ # Ensure the solutions are non-dominated and belong to the Pareto front
+ for i, solution_i in optimal_solutions.iterrows():
+ for j, solution_j in optimal_solutions.iterrows():
+ if i != j:
+ assert not (
+ (solution_i["y1"] <= solution_j["y1"] and solution_i["y2"] >= solution_j["y2"])
+ and (solution_i["y1"] < solution_j["y1"] or solution_i["y2"] > solution_j["y2"])
+ )
+
+ # Verify solutions are close to the true Pareto front
+ true_pareto_front = [{"x": 2, "y1": 4, "y2": 9}, {"x": 5, "y1": -5, "y2": 0}]
+
+ for true_solution in true_pareto_front:
+ assert any(
+ abs(solution["x"] - true_solution["x"]) < 0.5
+ and abs(solution["y1"] - true_solution["y1"]) < 0.5
+ and abs(solution["y2"] - true_solution["y2"]) < 0.5
+ for _, solution in optimal_solutions.iterrows()
+ )
diff --git a/tests/test_campaign_executor.py b/tests/test_campaign_executor.py
new file mode 100644
index 0000000..2a2a2a4
--- /dev/null
+++ b/tests/test_campaign_executor.py
@@ -0,0 +1,45 @@
+import asyncio
+
+from eos.campaigns.entities.campaign import CampaignStatus
+from tests.fixtures import *
+
+LAB_ID = "multiplication_lab"
+CAMPAIGN_ID = "optimize_multiplication_campaign"
+EXPERIMENT_TYPE = "optimize_multiplication"
+MAX_EXPERIMENTS = 40
+DO_OPTIMIZATION = True
+
+
+@pytest.mark.parametrize(
+ "setup_lab_experiment",
+ [(LAB_ID, EXPERIMENT_TYPE)],
+ indirect=True,
+)
+@pytest.mark.parametrize(
+ "campaign_executor",
+ [(CAMPAIGN_ID, EXPERIMENT_TYPE, MAX_EXPERIMENTS, DO_OPTIMIZATION)],
+ indirect=True,
+)
+class TestCampaignExecutor:
+ @pytest.mark.asyncio
+ async def test_start_campaign(self, campaign_executor, campaign_manager):
+ await campaign_executor.start_campaign()
+
+ campaign = campaign_manager.get_campaign(CAMPAIGN_ID)
+ assert campaign is not None
+ assert campaign.id == CAMPAIGN_ID
+ assert campaign.status == CampaignStatus.RUNNING
+
+ @pytest.mark.asyncio
+ async def test_progress_campaign(self, campaign_executor, campaign_manager, campaign_optimizer_manager):
+ await campaign_executor.start_campaign()
+
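+ # Drive the campaign to completion, yielding briefly between progress calls so
+ # running experiment tasks can advance.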
+ campaign_finished = False
+ while not campaign_finished:
+ campaign_finished = await campaign_executor.progress_campaign()
+ await asyncio.sleep(0.1)
+
+ solutions = await campaign_executor.optimizer.get_optimal_solutions.remote()
+ assert not solutions.empty
+ assert len(solutions) == 1
+ assert solutions["compute_multiplication_objective.objective"].iloc[0] / 100 <= 80
diff --git a/tests/test_config.yaml b/tests/test_config.yaml
new file mode 100644
index 0000000..3fe82e7
--- /dev/null
+++ b/tests/test_config.yaml
@@ -0,0 +1,16 @@
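+# Connection settings used by the test fixtures; the hosts and ports should match
+# the locally running MongoDB and MinIO services that back the tests.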
+configuration_manager:
+ user_dir: tests/user
+
+db_manager:
+ db_credentials:
+ host: localhost
+ port: 27017
+ username: eos-user
+ password: eos-password
+
+file_db_manager:
+ file_db_credentials:
+ host: localhost
+ port: 9004
+ username: eos-user
+ password: eos-password
diff --git a/tests/test_configuration_manager.py b/tests/test_configuration_manager.py
new file mode 100644
index 0000000..701c913
--- /dev/null
+++ b/tests/test_configuration_manager.py
@@ -0,0 +1,131 @@
+import copy
+import shutil
+import tempfile
+
+from eos.configuration.constants import TASK_IMPLEMENTATION_FILE_NAME, LAB_CONFIG_FILE_NAME
+from eos.configuration.exceptions import (
+ EosMissingConfigurationError,
+ EosConfigurationError,
+)
+from tests.fixtures import *
+
+LAB_1_ID = "small_lab"
+LAB_2_ID = "multiplication_lab"
+
+
+class TestConfigurationManager:
+ def test_load_lab(self, configuration_manager):
+ initial_labs = configuration_manager.labs
+ configuration_manager.load_lab(LAB_1_ID)
+
+ assert LAB_1_ID in configuration_manager.labs
+
+ expected_labs = copy.deepcopy(initial_labs)
+ expected_labs[LAB_1_ID] = configuration_manager.labs[LAB_1_ID]
+
+ assert configuration_manager.labs == expected_labs
+
+ def test_load_labs(self, configuration_manager):
+ initial_labs = configuration_manager.labs
+ configuration_manager.load_labs([LAB_1_ID, LAB_2_ID])
+
+ assert LAB_1_ID in configuration_manager.labs
+ assert LAB_2_ID in configuration_manager.labs
+
+ expected_labs = copy.deepcopy(initial_labs)
+ expected_labs[LAB_1_ID] = configuration_manager.labs[LAB_1_ID]
+ expected_labs[LAB_2_ID] = configuration_manager.labs[LAB_2_ID]
+
+ assert configuration_manager.labs == expected_labs
+
+ def test_load_nonexistent_lab(self, configuration_manager):
+ initial_labs = configuration_manager.labs
+ with pytest.raises(EosMissingConfigurationError):
+ configuration_manager.load_lab("nonexistent_lab")
+
+ assert configuration_manager.labs == initial_labs
+
+ def test_unload_lab(self, configuration_manager):
+ configuration_manager.load_lab(LAB_1_ID)
+ configuration_manager.load_lab(LAB_2_ID)
+ configuration_manager.load_experiment("water_purification")
+
+ expected_labs = copy.deepcopy(configuration_manager.labs)
+ expected_experiments = copy.deepcopy(configuration_manager.experiments)
+ configuration_manager.unload_lab(LAB_1_ID)
+
+ assert LAB_1_ID not in configuration_manager.labs
+ assert "water_purification" not in configuration_manager.experiments
+
+ expected_labs.pop(LAB_1_ID)
+ assert configuration_manager.labs == expected_labs
+
+ expected_experiments.pop("water_purification")
+ assert configuration_manager.experiments == expected_experiments
+
+ def test_unload_nonexistent_lab(self, configuration_manager):
+ configuration_manager.load_lab(LAB_1_ID)
+ configuration_manager.load_lab(LAB_2_ID)
+
+ with pytest.raises(EosConfigurationError):
+ configuration_manager.unload_lab("nonexistent_lab")
+
+ def test_load_experiment(self, configuration_manager):
+ configuration_manager.load_lab(LAB_1_ID)
+
+ initial_experiments = configuration_manager.experiments
+ configuration_manager.load_experiment("water_purification")
+ assert "water_purification" in configuration_manager.experiments
+
+ expected_experiments = copy.deepcopy(initial_experiments)
+ expected_experiments["water_purification"] = configuration_manager.experiments["water_purification"]
+
+ assert configuration_manager.experiments == expected_experiments
+
+ def test_load_nonexistent_experiment(self, configuration_manager):
+ configuration_manager.load_lab(LAB_1_ID)
+
+ initial_experiments = configuration_manager.experiments
+ with pytest.raises(EosMissingConfigurationError):
+ configuration_manager.load_experiment("nonexistent_experiment")
+
+ assert configuration_manager.experiments == initial_experiments
+
+ def test_unload_experiment(self, configuration_manager):
+ configuration_manager.load_lab(LAB_1_ID)
+
+ if "water_purification" not in configuration_manager.experiments:
+ configuration_manager.load_experiment("water_purification")
+
+ expected_experiments = copy.deepcopy(configuration_manager.experiments)
+ configuration_manager.unload_experiment("water_purification")
+
+ assert "water_purification" not in configuration_manager.experiments
+ expected_experiments.pop("water_purification")
+ assert configuration_manager.experiments == expected_experiments
+
+ def test_unload_nonexistent_experiment(self, configuration_manager):
+ configuration_manager.load_lab(LAB_1_ID)
+ with pytest.raises(EosConfigurationError):
+ configuration_manager.unload_experiment("nonexistent_experiment")
+
+ def test_user_dir_lab_file_existence(self, user_dir):
+ with tempfile.TemporaryDirectory(prefix="eos_test-") as temp_user_dir:
+ temp_user_dir_path = Path(temp_user_dir)
+ shutil.copytree(user_dir, temp_user_dir_path, dirs_exist_ok=True)
+
+ (temp_user_dir_path / "testing" / "labs" / LAB_1_ID / LAB_CONFIG_FILE_NAME).unlink()
+
+ with pytest.raises(EosMissingConfigurationError):
+ ConfigurationManager(user_dir=str(temp_user_dir_path))
+
+ def test_tasks_dir_task_handler_existence(self, user_dir):
+ with tempfile.TemporaryDirectory(prefix="eos_test-") as temp_user_dir:
+ shutil.copytree(user_dir, temp_user_dir, dirs_exist_ok=True)
+
+ temp_tasks_dir_path = Path(temp_user_dir) / "testing" / "tasks"
+ (temp_tasks_dir_path / "noop" / TASK_IMPLEMENTATION_FILE_NAME).unlink()
+
+ with pytest.raises(EosMissingConfigurationError):
+ ConfigurationManager(user_dir=str(temp_user_dir))
diff --git a/tests/test_container_allocator.py b/tests/test_container_allocator.py
new file mode 100644
index 0000000..acd1bde
--- /dev/null
+++ b/tests/test_container_allocator.py
@@ -0,0 +1,135 @@
+from eos.resource_allocation.exceptions import (
+ EosContainerAllocatedError,
+ EosContainerNotFoundError,
+)
+from tests.fixtures import *
+
+
+@pytest.mark.parametrize("setup_lab_experiment", [("small_lab", "water_purification")], indirect=True)
+class TestContainerAllocator:
+ def test_allocate_container(self, container_allocator):
+ container_id = "ec1ca48cd5d14c0c8cde376476e0d98d"
+ container_allocator.allocate(container_id, "owner", "water_purification_1")
+ container = container_allocator.get_allocation(container_id)
+
+ assert container.id == container_id
+ assert container.owner == "owner"
+ assert container.experiment_id == "water_purification_1"
+
+ def test_allocate_container_already_allocated(self, container_allocator):
+ container_id = "ec1ca48cd5d14c0c8cde376476e0d98d"
+ container_allocator.allocate(container_id, "owner", "water_purification_1")
+
+ with pytest.raises(EosContainerAllocatedError):
+ container_allocator.allocate(container_id, "owner", "water_purification_1")
+
+ def test_allocate_nonexistent_container(self, container_allocator):
+ container_id = "nonexistent_container_id"
+ with pytest.raises(EosContainerNotFoundError):
+ container_allocator.allocate(container_id, "owner", "water_purification_1")
+
+ def test_deallocate_container(self, container_allocator):
+ container_id = "ec1ca48cd5d14c0c8cde376476e0d98d"
+ container_allocator.allocate(container_id, "owner", "water_purification_1")
+
+ container_allocator.deallocate(container_id)
+ container = container_allocator.get_allocation(container_id)
+
+ assert container is None
+
+ def test_deallocate_container_not_allocated(self, container_allocator):
+ container_id = "ec1ca48cd5d14c0c8cde376476e0d98d"
+ container_allocator.deallocate(container_id)
+ assert container_allocator.get_allocation(container_id) is None
+
+ def test_is_allocated(self, container_allocator):
+ container_id = "ec1ca48cd5d14c0c8cde376476e0d98d"
+ assert not container_allocator.is_allocated(container_id)
+
+ container_allocator.allocate(container_id, "owner", "water_purification_1")
+ assert container_allocator.is_allocated(container_id)
+
+ def test_get_allocations_by_owner(self, container_allocator):
+ container_id_1 = "ec1ca48cd5d14c0c8cde376476e0d98d"
+ container_id_2 = "84eb17d61e884ffd9d1fdebcbad1532b"
+ container_id_3 = "a3b958aea8bd435386cdcbab20a2d3ec"
+
+ container_allocator.allocate(container_id_1, "owner", "water_purification_1")
+ container_allocator.allocate(container_id_2, "owner", "water_purification_1")
+ container_allocator.allocate(container_id_3, "another_owner", "water_purification_1")
+
+ allocations = container_allocator.get_allocations(owner="owner")
+ assert allocations[0].id == container_id_1
+ assert allocations[1].id == container_id_2
+ assert len(allocations) == 2
+
+ allocations = container_allocator.get_allocations(owner="another_owner")
+ assert allocations[0].id == container_id_3
+ assert len(allocations) == 1
+
+ def test_get_all_allocations(self, container_allocator):
+ container_id_1 = "ec1ca48cd5d14c0c8cde376476e0d98d"
+ container_id_2 = "84eb17d61e884ffd9d1fdebcbad1532b"
+ container_id_3 = "a3b958aea8bd435386cdcbab20a2d3ec"
+
+ container_allocator.allocate(container_id_1, "owner", "water_purification_1")
+ container_allocator.allocate(container_id_2, "owner", "water_purification_1")
+ container_allocator.allocate(container_id_3, "another_owner", "water_purification_1")
+
+ allocations = container_allocator.get_allocations()
+ assert len(allocations) == 3
+ assert {allocation.id for allocation in allocations} == {
+ container_id_1,
+ container_id_2,
+ container_id_3,
+ }
+
+ def test_get_all_unallocated_containers(self, container_allocator):
+ container_id_1 = "ec1ca48cd5d14c0c8cde376476e0d98d"
+ container_id_2 = "84eb17d61e884ffd9d1fdebcbad1532b"
+ container_id_3 = "a3b958aea8bd435386cdcbab20a2d3ec"
+
+ initial_unallocated_containers = container_allocator.get_all_unallocated()
+
+ container_allocator.allocate(container_id_1, "owner1", "water_purification_1")
+ container_allocator.allocate(container_id_2, "owner2", "water_purification_1")
+
+ new_unallocated_containers = container_allocator.get_all_unallocated()
+ assert len(new_unallocated_containers) == len(initial_unallocated_containers) - 2
+ assert container_id_1 not in new_unallocated_containers
+ assert container_id_2 not in new_unallocated_containers
+ assert container_id_3 in new_unallocated_containers
+
+ def test_deallocate_all_containers(self, container_allocator):
+ container_id_1 = "ec1ca48cd5d14c0c8cde376476e0d98d"
+ container_id_2 = "84eb17d61e884ffd9d1fdebcbad1532b"
+ container_id_3 = "a3b958aea8bd435386cdcbab20a2d3ec"
+
+ container_allocator.allocate(container_id_1, "owner1", "water_purification_1")
+ container_allocator.allocate(container_id_2, "owner2", "water_purification_1")
+ container_allocator.allocate(container_id_3, "owner3", "water_purification_1")
+
+ assert container_allocator.get_allocations() != []
+
+ container_allocator.deallocate_all()
+
+ assert container_allocator.get_allocations() == []
+
+ def test_deallocate_all_containers_by_owner(self, container_allocator):
+ container_id_1 = "ec1ca48cd5d14c0c8cde376476e0d98d"
+ container_id_2 = "84eb17d61e884ffd9d1fdebcbad1532b"
+ container_id_3 = "a3b958aea8bd435386cdcbab20a2d3ec"
+
+ container_allocator.allocate(container_id_1, "owner1", "water_purification_1")
+ container_allocator.allocate(container_id_2, "owner2", "water_purification_1")
+ container_allocator.allocate(container_id_3, "owner2", "water_purification_1")
+
+ container_allocator.deallocate_all_by_owner("owner2")
+
+ owner2_allocations = container_allocator.get_allocations(owner="owner2")
+ assert owner2_allocations == []
+ assert container_allocator.get_allocations() == [
+ container_allocator.get_allocation(container_id_1)
+ ]
diff --git a/tests/test_container_manager.py b/tests/test_container_manager.py
new file mode 100644
index 0000000..3a7522d
--- /dev/null
+++ b/tests/test_container_manager.py
@@ -0,0 +1,46 @@
+from tests.fixtures import *
+
+
+@pytest.fixture
+def container_manager(configuration_manager, setup_lab_experiment, db_manager, clean_db):
+ return ContainerManager(configuration_manager, db_manager)
+
+
+@pytest.mark.parametrize("setup_lab_experiment", [("small_lab", "water_purification")], indirect=True)
+class TestContainerManager:
+ def test_set_container_location(self, container_manager):
+ container_id = "acf829f859e04fee80d54a1ee918555d"
+ container_manager.set_location(container_id, "new_location")
+
+ assert container_manager.get_container(container_id).location == "new_location"
+
+ def test_set_container_lab(self, container_manager):
+ container_id = "acf829f859e04fee80d54a1ee918555d"
+ container_manager.set_lab(container_id, "new_lab")
+
+ assert container_manager.get_container(container_id).lab == "new_lab"
+
+ def test_set_container_metadata(self, container_manager):
+ container_id = "acf829f859e04fee80d54a1ee918555d"
+ container_manager.set_metadata(container_id, {"substance": "water"})
+ container_manager.set_metadata(container_id, {"temperature": "cold"})
+
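+ # set_metadata replaces the entire metadata dict, so only the last call survives.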
+ assert container_manager.get_container(container_id).metadata == {"temperature": "cold"}
+
+ def test_add_container_metadata(self, container_manager):
+ container_id = "acf829f859e04fee80d54a1ee918555d"
+ container_manager.add_metadata(container_id, {"substance": "water"})
+ container_manager.add_metadata(container_id, {"temperature": "cold"})
+
+ assert container_manager.get_container(container_id).metadata == {
+ "capacity": 500,
+ "substance": "water",
+ "temperature": "cold",
+ }
+
+ def test_remove_container_metadata(self, container_manager):
+ container_id = "acf829f859e04fee80d54a1ee918555d"
+ container_manager.add_metadata(container_id, {"substance": "water", "temperature": "cold", "color": "blue"})
+ container_manager.remove_metadata(container_id, ["color", "temperature"])
+
+ assert container_manager.get_container(container_id).metadata == {"capacity": 500, "substance": "water"}
diff --git a/tests/test_device_allocator.py b/tests/test_device_allocator.py
new file mode 100644
index 0000000..9d69da4
--- /dev/null
+++ b/tests/test_device_allocator.py
@@ -0,0 +1,135 @@
+from eos.resource_allocation.exceptions import (
+ EosDeviceAllocatedError,
+ EosDeviceNotFoundError,
+)
+from tests.fixtures import *
+
+LAB_ID = "small_lab"
+
+
+@pytest.mark.parametrize("setup_lab_experiment", [(LAB_ID, "water_purification")], indirect=True)
+class TestDeviceAllocator:
+ def test_allocate_device(self, device_allocator):
+ device_id = "magnetic_mixer"
+ device_allocator.allocate(LAB_ID, device_id, "owner", "water_purification_1")
+
+ allocation = device_allocator.get_allocation(LAB_ID, device_id)
+
+ assert allocation.id == device_id
+ assert allocation.lab_id == LAB_ID
+ assert allocation.device_type == "magnetic_mixer"
+ assert allocation.owner == "owner"
+ assert allocation.experiment_id == "water_purification_1"
+
+ def test_allocate_device_already_allocated(self, device_allocator):
+ device_id = "magnetic_mixer"
+ device_allocator.allocate(LAB_ID, device_id, "owner", "water_purification_1")
+
+ with pytest.raises(EosDeviceAllocatedError):
+ device_allocator.allocate(LAB_ID, device_id, "owner", "water_purification_1")
+
+ def test_allocate_nonexistent_device(self, device_allocator):
+ device_id = "nonexistent_device_id"
+ with pytest.raises(EosDeviceNotFoundError):
+ device_allocator.allocate(LAB_ID, device_id, "owner", "water_purification_1")
+
+ def test_deallocate_device(self, device_allocator):
+ device_id = "magnetic_mixer"
+ device_allocator.allocate(LAB_ID, device_id, "owner", "water_purification_1")
+
+ device_allocator.deallocate(LAB_ID, device_id)
+ allocation = device_allocator.get_allocation(LAB_ID, device_id)
+
+ assert allocation is None
+
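+ # Deallocating a device that was never allocated is expected to be a silent no-op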
+ def test_deallocate_device_not_allocated(self, device_allocator):
+ device_id = "magnetic_mixer"
+ device_allocator.deallocate(LAB_ID, device_id)
+ assert device_allocator.get_allocation(LAB_ID, device_id) is None
+
+ def test_is_allocated(self, device_allocator):
+ device_id = "magnetic_mixer"
+ assert not device_allocator.is_allocated(LAB_ID, device_id)
+
+ device_allocator.allocate(LAB_ID, device_id, "owner", "water_purification_1")
+ assert device_allocator.is_allocated(LAB_ID, device_id)
+
+ def test_get_allocations_by_owner(self, device_allocator):
+ device_id_1 = "magnetic_mixer"
+ device_id_2 = "evaporator"
+ device_id_3 = "substance_fridge"
+
+ device_allocator.allocate(LAB_ID, device_id_1, "owner1", "water_purification_1")
+ device_allocator.allocate(LAB_ID, device_id_2, "owner1", "water_purification_1")
+ device_allocator.allocate(LAB_ID, device_id_3, "owner2", "water_purification_1")
+
+ allocations = device_allocator.get_allocations(owner="owner1")
+
+ assert len(allocations) == 2
+ assert device_id_1 in [allocation.id for allocation in allocations]
+ assert device_id_2 in [allocation.id for allocation in allocations]
+
+ def test_get_all_allocations(self, device_allocator):
+ device_id_1 = "magnetic_mixer"
+ device_id_2 = "evaporator"
+ device_id_3 = "substance_fridge"
+
+ device_allocator.allocate(LAB_ID, device_id_1, "owner", "water_purification_1")
+ device_allocator.allocate(LAB_ID, device_id_2, "owner", "water_purification_1")
+ device_allocator.allocate(LAB_ID, device_id_3, "owner", "water_purification_1")
+
+ allocations = device_allocator.get_allocations()
+
+ assert len(allocations) == 3
+ assert device_id_1 in [allocation.id for allocation in allocations]
+ assert device_id_2 in [allocation.id for allocation in allocations]
+ assert device_id_3 in [allocation.id for allocation in allocations]
+
+ def test_get_all_unallocated(self, device_allocator):
+ device_id_1 = "magnetic_mixer"
+ device_id_2 = "evaporator"
+ device_id_3 = "substance_fridge"
+
+ initial_unallocated_devices = device_allocator.get_all_unallocated()
+
+ device_allocator.allocate(LAB_ID, device_id_1, "owner", "water_purification_1")
+ device_allocator.allocate(LAB_ID, device_id_2, "owner", "water_purification_1")
+
+ new_unallocated_devices = device_allocator.get_all_unallocated()
+
+ assert len(new_unallocated_devices) == len(initial_unallocated_devices) - 2
+ assert device_id_1 not in new_unallocated_devices
+ assert device_id_2 not in new_unallocated_devices
+ assert device_id_3 in new_unallocated_devices
+
+ def test_deallocate_all(self, device_allocator):
+ device_id_1 = "magnetic_mixer"
+ device_id_2 = "evaporator"
+ device_id_3 = "substance_fridge"
+
+ device_allocator.allocate(LAB_ID, device_id_1, "owner", "water_purification_1")
+ device_allocator.allocate(LAB_ID, device_id_2, "owner", "water_purification_1")
+ device_allocator.allocate(LAB_ID, device_id_3, "owner", "water_purification_1")
+
+ assert device_allocator.get_allocations() != []
+
+ device_allocator.deallocate_all()
+
+ assert device_allocator.get_allocations() == []
+
+ def test_deallocate_all_by_owner(self, device_allocator):
+ device_id_1 = "magnetic_mixer"
+ device_id_2 = "evaporator"
+ device_id_3 = "substance_fridge"
+
+ device_allocator.allocate(LAB_ID, device_id_1, "owner1", "water_purification_1")
+ device_allocator.allocate(LAB_ID, device_id_2, "owner2", "water_purification_1")
+ device_allocator.allocate(LAB_ID, device_id_3, "owner2", "water_purification_1")
+
+ device_allocator.deallocate_all_by_owner("owner2")
+
+ owner2_allocations = device_allocator.get_allocations(owner="owner2")
+ assert owner2_allocations == []
+ assert device_allocator.get_allocations() == [
+ device_allocator.get_allocation(LAB_ID, device_id_1)
+ ]
diff --git a/tests/test_device_manager.py b/tests/test_device_manager.py
new file mode 100644
index 0000000..c281257
--- /dev/null
+++ b/tests/test_device_manager.py
@@ -0,0 +1,38 @@
+from eos.devices.entities.device import DeviceStatus
+from eos.devices.exceptions import EosDeviceStateError
+from tests.fixtures import *
+
+LAB_ID = "small_lab"
+
+
+@pytest.mark.parametrize("setup_lab_experiment", [(LAB_ID, "water_purification")], indirect=True)
+class TestDeviceManager:
+ def test_get_device(self, device_manager):
+ device = device_manager.get_device(LAB_ID, "substance_fridge")
+ assert device.id == "substance_fridge"
+ assert device.lab_id == LAB_ID
+ assert device.type == "fridge"
+ assert device.location == "substance_fridge"
+
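+ # Looking up an unknown device returns None rather than raising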
+ def test_get_device_nonexistent(self, device_manager):
+ device = device_manager.get_device(LAB_ID, "nonexistent_device")
+ assert device is None
+
+ def test_get_all_devices(self, device_manager):
+ devices = device_manager.get_devices(lab_id=LAB_ID)
+ assert len(devices) == 5
+
+ def test_get_devices_by_type(self, device_manager):
+ devices = device_manager.get_devices(lab_id=LAB_ID, type="magnetic_mixer")
+ assert len(devices) == 2
+ assert all(device.type == "magnetic_mixer" for device in devices)
+
+ def test_set_device_status(self, device_manager):
+ device_manager.set_device_status(LAB_ID, "evaporator", DeviceStatus.ACTIVE)
+ device = device_manager.get_device(LAB_ID, "evaporator")
+ assert device.status == DeviceStatus.ACTIVE
+
+ def test_set_device_status_nonexistent(self, device_manager):
+ with pytest.raises(EosDeviceStateError):
+ device_manager.set_device_status(LAB_ID, "nonexistent_device", DeviceStatus.INACTIVE)
diff --git a/tests/test_experiment_executor.py b/tests/test_experiment_executor.py
new file mode 100644
index 0000000..0e12f44
--- /dev/null
+++ b/tests/test_experiment_executor.py
@@ -0,0 +1,106 @@
+import asyncio
+
+from eos.experiments.entities.experiment import ExperimentStatus
+from eos.tasks.entities.task import TaskStatus
+from tests.fixtures import *
+
+LAB_ID = "small_lab"
+EXPERIMENT_TYPE = "water_purification"
+EXPERIMENT_ID = "water_purification_#1"
+
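+# User-supplied dynamic parameters that fill the experiment's unresolved task inputs at start time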
+DYNAMIC_PARAMETERS = {
+ "mixing": {
+ "time": 120,
+ },
+ "evaporation": {
+ "evaporation_temperature": 120,
+ "evaporation_rotation_speed": 200,
+ "evaporation_sparging_flow": 5,
+ },
+}
+
+
+@pytest.mark.parametrize(
+ "setup_lab_experiment",
+ [(LAB_ID, EXPERIMENT_TYPE)],
+ indirect=True,
+)
+@pytest.mark.parametrize(
+ "experiment_executor",
+ [(EXPERIMENT_ID, EXPERIMENT_TYPE)],
+ indirect=True,
+)
+class TestExperimentExecutor:
+ def test_start_experiment(self, experiment_executor, experiment_manager):
+ experiment_executor.start_experiment(DYNAMIC_PARAMETERS)
+
+ experiment = experiment_manager.get_experiment(EXPERIMENT_ID)
+ assert experiment is not None
+ assert experiment.id == EXPERIMENT_ID
+ assert experiment.status == ExperimentStatus.RUNNING
+
+ @pytest.mark.asyncio
+ async def test_progress_experiment(self, experiment_executor, experiment_manager, task_manager):
+ experiment_executor.start_experiment(DYNAMIC_PARAMETERS)
+
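+ # Each progress_experiment() call presumably schedules whatever tasks are ready to run;
+ # awaiting the internal output future blocks until the named task has finished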
+ experiment_completed = await experiment_executor.progress_experiment()
+ assert not experiment_completed
+ await experiment_executor._task_output_futures["mixing"]
+
+ experiment_completed = await experiment_executor.progress_experiment()
+ assert not experiment_completed
+ task = task_manager.get_task(EXPERIMENT_ID, "mixing")
+ assert task is not None
+ assert task.status == TaskStatus.COMPLETED
+ await experiment_executor._task_output_futures["evaporation"]
+
+ experiment_completed = await experiment_executor.progress_experiment()
+ task = task_manager.get_task(EXPERIMENT_ID, "evaporation")
+ assert task.status == TaskStatus.COMPLETED
+ assert not experiment_completed
+
+ # Final progress
+ experiment_completed = await experiment_executor.progress_experiment()
+ assert experiment_completed
+ experiment = experiment_manager.get_experiment(EXPERIMENT_ID)
+ assert experiment.status == ExperimentStatus.COMPLETED
+
+ @pytest.mark.asyncio
+ async def test_task_output_registration(self, experiment_executor, task_manager):
+ experiment_executor.start_experiment(DYNAMIC_PARAMETERS)
+
+ experiment_completed = False
+ while not experiment_completed:
+ experiment_completed = await experiment_executor.progress_experiment()
+ await asyncio.sleep(0.1)
+
+ mixing_output = task_manager.get_task_output(EXPERIMENT_ID, "mixing")
+ assert mixing_output is not None
+ assert mixing_output.parameters["mixing_time"] == DYNAMIC_PARAMETERS["mixing"]["time"]
+
+ @pytest.mark.asyncio
+ async def test_resolve_input_parameter_references_and_dynamic_parameters(
+ self, experiment_executor, task_manager
+ ):
+ experiment_executor.start_experiment(DYNAMIC_PARAMETERS)
+
+ experiment_completed = False
+ while not experiment_completed:
+ experiment_completed = await experiment_executor.progress_experiment()
+ await asyncio.sleep(0.1)
+
+ mixing_task = task_manager.get_task(EXPERIMENT_ID, "mixing")
+ mixing_result = task_manager.get_task_output(EXPERIMENT_ID, "mixing")
+
+ evaporation_task = task_manager.get_task(EXPERIMENT_ID, "evaporation")
+ # Check that the dynamic parameter was applied to the mixing task's "time" input
+ assert mixing_task.input.parameters["time"] == DYNAMIC_PARAMETERS["mixing"]["time"]
+
+ # Check that mixing's "mixing_time" output was propagated to evaporation's "evaporation_time" input
+ assert (
+ evaporation_task.input.parameters["evaporation_time"]
+ == mixing_result.parameters["mixing_time"]
+ )
diff --git a/tests/test_experiment_graph.py b/tests/test_experiment_graph.py
new file mode 100644
index 0000000..fb8c214
--- /dev/null
+++ b/tests/test_experiment_graph.py
@@ -0,0 +1,42 @@
+from tests.fixtures import *
+
+
+@pytest.mark.parametrize("setup_lab_experiment", [("small_lab", "water_purification")], indirect=True)
+class TestExperimentGraph:
+ def test_get_graph(self, experiment_graph):
+ graph = experiment_graph.get_graph()
+ assert graph is not None
+
+ def test_get_task_node(self, experiment_graph):
+ task_node = experiment_graph.get_task_node("mixing")
+ assert task_node is not None
+ assert task_node["node_type"] == "task"
+ assert task_node["task_config"].type == "Magnetic Mixing"
+
+ def test_get_task_spec(self, experiment_graph):
+ task_spec = experiment_graph.get_task_spec("mixing")
+ assert task_spec is not None
+ assert task_spec.type == "Magnetic Mixing"
+
+ def test_get_container_node(self, experiment_graph):
+ container_node = experiment_graph.get_container_node("026749f8f40342b38157f9824ae2f512")
+ assert container_node is not None
+ assert container_node["node_type"] == "container"
+ assert container_node["container"]["beaker"] == "026749f8f40342b38157f9824ae2f512"
+
+ def test_get_task_dependencies(self, experiment_graph):
+ dependencies = experiment_graph.get_task_dependencies("evaporation")
+ assert dependencies == ["mixing"]
+
+ def test_get_task_inputs(self, experiment_graph):
+ inputs = experiment_graph.get_task_inputs("mixing")
+ assert inputs.containers == ["026749f8f40342b38157f9824ae2f512"]
+
+ inputs = experiment_graph.get_task_inputs("evaporation")
+ assert inputs.parameters == ["mixing.mixing_time"]
+
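+ # Output container ids appear to follow the "<container_id>_<task_id>" naming convention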
+ def test_get_task_outputs(self, experiment_graph):
+ outputs = experiment_graph.get_task_outputs("mixing")
+ assert outputs.containers == ["026749f8f40342b38157f9824ae2f512_mixing"]
+ assert outputs.parameters == ["mixing.mixing_time"]
diff --git a/tests/test_experiment_manager.py b/tests/test_experiment_manager.py
new file mode 100644
index 0000000..2f297c6
--- /dev/null
+++ b/tests/test_experiment_manager.py
@@ -0,0 +1,93 @@
+from eos.experiments.entities.experiment import ExperimentStatus
+from eos.experiments.exceptions import EosExperimentStateError
+from tests.fixtures import *
+
+EXPERIMENT_ID = "water_purification"
+
+
+@pytest.mark.parametrize("setup_lab_experiment", [("small_lab", EXPERIMENT_TYPE)], indirect=True)
+class TestExperimentManager:
+ def test_create_experiment(self, experiment_manager):
+ experiment_manager.create_experiment("test_experiment", EXPERIMENT_ID)
+ experiment_manager.create_experiment("test_experiment_2", EXPERIMENT_ID)
+
+ assert experiment_manager.get_experiment("test_experiment").id == "test_experiment"
+ assert experiment_manager.get_experiment("test_experiment_2").id == "test_experiment_2"
+
+ def test_create_experiment_nonexistent_type(self, experiment_manager):
+ with pytest.raises(EosExperimentStateError):
+ experiment_manager.create_experiment("test_experiment", "nonexistent_type")
+
+ def test_create_existing_experiment(self, experiment_manager):
+ experiment_manager.create_experiment("test_experiment", EXPERIMENT_TYPE)
+
+ with pytest.raises(EosExperimentStateError):
+ experiment_manager.create_experiment("test_experiment", EXPERIMENT_TYPE)
+
+ def test_delete_experiment(self, experiment_manager):
+ experiment_manager.create_experiment("test_experiment", EXPERIMENT_TYPE)
+
+ assert experiment_manager.get_experiment("test_experiment").id == "test_experiment"
+
+ experiment_manager.delete_experiment("test_experiment")
+
+ assert experiment_manager.get_experiment("test_experiment") is None
+
+ def test_delete_nonexistent_experiment(self, experiment_manager):
+ with pytest.raises(EosExperimentStateError):
+ experiment_manager.delete_experiment("non_existing_experiment")
+
+ def test_get_experiments_by_status(self, experiment_manager):
+ experiment_manager.create_experiment("test_experiment", EXPERIMENT_TYPE)
+ experiment_manager.create_experiment("test_experiment_2", EXPERIMENT_TYPE)
+ experiment_manager.create_experiment("test_experiment_3", EXPERIMENT_TYPE)
+
+ experiment_manager.start_experiment("test_experiment")
+ experiment_manager.start_experiment("test_experiment_2")
+ experiment_manager.complete_experiment("test_experiment_3")
+
+ running_experiments = experiment_manager.get_experiments(
+ status=ExperimentStatus.RUNNING.value
+ )
+ completed_experiments = experiment_manager.get_experiments(
+ status=ExperimentStatus.COMPLETED.value
+ )
+
+ assert running_experiments == [
+ experiment_manager.get_experiment("test_experiment"),
+ experiment_manager.get_experiment("test_experiment_2"),
+ ]
+
+ assert completed_experiments == [experiment_manager.get_experiment("test_experiment_3")]
+
+ def test_set_experiment_status(self, experiment_manager):
+ experiment_manager.create_experiment("test_experiment", EXPERIMENT_TYPE)
+ assert (
+ experiment_manager.get_experiment("test_experiment").status == ExperimentStatus.CREATED
+ )
+
+ experiment_manager.start_experiment("test_experiment")
+ assert (
+ experiment_manager.get_experiment("test_experiment").status == ExperimentStatus.RUNNING
+ )
+
+ experiment_manager.complete_experiment("test_experiment")
+ assert (
+ experiment_manager.get_experiment("test_experiment").status
+ == ExperimentStatus.COMPLETED
+ )
+
+ def test_set_experiment_status_nonexistent_experiment(self, experiment_manager):
+ with pytest.raises(EosExperimentStateError):
+ experiment_manager.start_experiment("nonexistent_experiment")
+
+ def test_get_all_experiments(self, experiment_manager):
+ experiment_manager.create_experiment("test_experiment", EXPERIMENT_TYPE)
+ experiment_manager.create_experiment("test_experiment_2", EXPERIMENT_TYPE)
+ experiment_manager.create_experiment("test_experiment_3", EXPERIMENT_TYPE)
+
+ assert experiment_manager.get_experiments() == [
+ experiment_manager.get_experiment("test_experiment"),
+ experiment_manager.get_experiment("test_experiment_2"),
+ experiment_manager.get_experiment("test_experiment_3"),
+ ]
diff --git a/tests/test_lab_validation.py b/tests/test_lab_validation.py
new file mode 100644
index 0000000..0f38a93
--- /dev/null
+++ b/tests/test_lab_validation.py
@@ -0,0 +1,69 @@
+from eos.configuration.entities.lab import LabContainerConfig
+from eos.configuration.exceptions import EosLabConfigurationError
+from eos.configuration.validation.lab_validator import LabValidator
+from tests.fixtures import *
+
+
+@pytest.fixture
+def lab(configuration_manager):
+ configuration_manager.load_lab("small_lab")
+ return configuration_manager.labs["small_lab"]
+
+
+class TestLabValidation:
+ def test_device_locations(self, configuration_manager, lab):
+ lab.devices.magnetic_mixer.location = "invalid_location"
+
+ with pytest.raises(EosLabConfigurationError):
+ LabValidator(configuration_manager._user_dir, lab).validate()
+
+ def test_container_locations(self, configuration_manager, lab):
+ lab.containers[0].location = "invalid_location"
+
+ with pytest.raises(EosLabConfigurationError):
+ LabValidator(configuration_manager._user_dir, lab).validate()
+
+ def test_device_computers(self, configuration_manager, lab):
+ lab.devices.magnetic_mixer.computer = "invalid_computer"
+
+ with pytest.raises(EosLabConfigurationError):
+ LabValidator(configuration_manager._user_dir, lab).validate()
+
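+ # The two tests below assume a container type may only appear in one config entry and that ids are unique across entries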
+ def test_container_non_unique_type(self, configuration_manager, lab):
+ lab.containers.extend(
+ [
+ LabContainerConfig(
+ type="beaker",
+ location="substance_shelf",
+ ids=["a", "b"],
+ ),
+ LabContainerConfig(
+ type="beaker",
+ location="substance_shelf",
+ ids=["c", "d"],
+ ),
+ ]
+ )
+
+ with pytest.raises(EosLabConfigurationError):
+ LabValidator(configuration_manager._user_dir, lab).validate()
+
+ def test_container_duplicate_ids(self, configuration_manager, lab):
+ lab.containers.extend(
+ [
+ LabContainerConfig(
+ type="beaker",
+ location="substance_shelf",
+ ids=["a", "b"],
+ ),
+ LabContainerConfig(
+ type="flask",
+ location="substance_shelf",
+ ids=["a", "b"],
+ ),
+ ]
+ )
+
+ with pytest.raises(EosLabConfigurationError):
+ LabValidator(configuration_manager._user_dir, lab).validate()
diff --git a/tests/test_multi_lab_validation.py b/tests/test_multi_lab_validation.py
new file mode 100644
index 0000000..3bb4045
--- /dev/null
+++ b/tests/test_multi_lab_validation.py
@@ -0,0 +1,17 @@
+import copy
+
+from eos.configuration.exceptions import EosLabConfigurationError
+from eos.configuration.validation.multi_lab_validator import MultiLabValidator
+from tests.fixtures import *
+
+
+class TestMultiLabValidation:
+ def test_duplicate_container_ids(self, configuration_manager):
+ configuration_manager.load_lab("small_lab")
+ lab = configuration_manager.labs["small_lab"]
+
+ # Deep-copy the lab to simulate two labs that declare the same container ids
+ lab_copy = copy.deepcopy(lab)
+
+ with pytest.raises(EosLabConfigurationError):
+ MultiLabValidator([lab, lab_copy]).validate()
diff --git a/tests/test_resource_allocation_manager.py b/tests/test_resource_allocation_manager.py
new file mode 100644
index 0000000..cc4a62a
--- /dev/null
+++ b/tests/test_resource_allocation_manager.py
@@ -0,0 +1,216 @@
+from bson import ObjectId
+
+from eos.resource_allocation.entities.resource_request import (
+ ResourceAllocationRequest,
+ ActiveResourceAllocationRequest,
+ ResourceType,
+ ResourceRequestAllocationStatus,
+)
+from eos.resource_allocation.exceptions import EosDeviceNotFoundError
+from tests.fixtures import *
+
+LAB_ID = "small_lab"
+
+
+@pytest.mark.parametrize("setup_lab_experiment", [(LAB_ID, "water_purification")], indirect=True)
+class TestResourceAllocationManager:
+ def test_request_resources(self, resource_allocation_manager):
+ request = ResourceAllocationRequest(
+ requester="test_requester",
+ reason="Needed for experiment",
+ experiment_id="water_purification_1",
+ )
+ request.add_resource("magnetic_mixer", LAB_ID, ResourceType.DEVICE)
+ request.add_resource("026749f8f40342b38157f9824ae2f512", "", ResourceType.CONTAINER)
+
+ def callback(active_request: ActiveResourceAllocationRequest):
+ assert active_request.status == ResourceRequestAllocationStatus.ALLOCATED
+ assert len(active_request.request.resources) == 2
+ assert any(r.id == "magnetic_mixer" for r in active_request.request.resources)
+ assert any(r.id == "026749f8f40342b38157f9824ae2f512" for r in active_request.request.resources)
+
+ active_request = resource_allocation_manager.request_resources(request, callback)
+
+ assert active_request.request == request
+ assert active_request.status == ResourceRequestAllocationStatus.PENDING
+
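+ # Processing should perform the allocation and fire the callback defined above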
+ resource_allocation_manager.process_active_requests()
+
+ def test_request_resources_priority(self, resource_allocation_manager):
+ requests = [
+ ResourceAllocationRequest(
+ requester=f"test_requester{i}",
+ reason="Needed for experiment",
+ experiment_id="water_purification_1",
+ priority=100 + i,
+ )
+ for i in range(1, 4)
+ ]
+ for request in requests:
+ request.add_resource("magnetic_mixer", LAB_ID, ResourceType.DEVICE)
+
+ active_requests = [resource_allocation_manager.request_resources(req, lambda x: None) for req in requests]
+ resource_allocation_manager.process_active_requests()
+
+ # Ensure that requests[0] is allocated and the rest are pending
+ active_request_3 = resource_allocation_manager.get_active_request(active_requests[2].id)
+ assert active_request_3.status == ResourceRequestAllocationStatus.PENDING
+ assert active_request_3.request.requester == "test_requester3"
+ assert active_request_3.request.priority == 103
+
+ active_request_2 = resource_allocation_manager.get_active_request(active_requests[1].id)
+ assert active_request_2.status == ResourceRequestAllocationStatus.PENDING
+ assert active_request_2.request.requester == "test_requester2"
+ assert active_request_2.request.priority == 102
+
+ active_request_1 = resource_allocation_manager.get_active_request(active_requests[0].id)
+ assert active_request_1.status == ResourceRequestAllocationStatus.ALLOCATED
+ assert active_request_1.request.requester == "test_requester1"
+ assert active_request_1.request.priority == 101
+
+ resource_allocation_manager.release_resources(active_request_1)
+
+ resource_allocation_manager.process_active_requests()
+
+ # Ensure that requests[1] is now allocated and requests[2] is still pending
+ active_request_3 = resource_allocation_manager.get_active_request(active_requests[2].id)
+ assert active_request_3.status == ResourceRequestAllocationStatus.PENDING
+ assert active_request_3.request.requester == "test_requester3"
+ assert active_request_3.request.priority == 103
+
+ active_request_2 = resource_allocation_manager.get_active_request(active_requests[1].id)
+ assert active_request_2.status == ResourceRequestAllocationStatus.ALLOCATED
+ assert active_request_2.request.requester == "test_requester2"
+ assert active_request_2.request.priority == 102
+
+ def test_release_resources(self, resource_allocation_manager):
+ request = ResourceAllocationRequest(
+ requester="test_requester",
+ reason="Needed for experiment",
+ experiment_id="water_purification_1",
+ priority=1,
+ )
+ request.add_resource("magnetic_mixer", LAB_ID, ResourceType.DEVICE)
+ request.add_resource("026749f8f40342b38157f9824ae2f512", "", ResourceType.CONTAINER)
+
+ active_request = resource_allocation_manager.request_resources(request, lambda x: None)
+
+ resource_allocation_manager.process_active_requests()
+
+ resource_allocation_manager.release_resources(active_request)
+
+ assert (
+ resource_allocation_manager.get_active_request(active_request.id).status
+ == ResourceRequestAllocationStatus.COMPLETED
+ )
+
+ def test_process_active_requests(self, resource_allocation_manager):
+ requests = [
+ ResourceAllocationRequest(
+ requester=f"test_requester{i}",
+ reason="Needed for experiment",
+ experiment_id="water_purification_1",
+ )
+ for i in range(1, 3)
+ ]
+ for request in requests:
+ request.add_resource("magnetic_mixer", LAB_ID, ResourceType.DEVICE)
+
+ active_requests = [resource_allocation_manager.request_resources(req, lambda x: None) for req in requests]
+
+ resource_allocation_manager.process_active_requests()
+
+ assert (
+ resource_allocation_manager.get_active_request(active_requests[0].id).status
+ == ResourceRequestAllocationStatus.ALLOCATED
+ )
+ assert (
+ resource_allocation_manager.get_active_request(active_requests[1].id).status
+ == ResourceRequestAllocationStatus.PENDING
+ )
+
+ def test_abort_active_request(self, resource_allocation_manager):
+ request = ResourceAllocationRequest(
+ requester="test_requester",
+ reason="Needed for experiment",
+ experiment_id="water_purification_1",
+ )
+ request.add_resource("magnetic_mixer", LAB_ID, ResourceType.DEVICE)
+ request.add_resource("magnetic_mixer_2", LAB_ID, ResourceType.DEVICE)
+
+ active_request = resource_allocation_manager.request_resources(request, lambda x: None)
+
+ resource_allocation_manager.abort_active_request(active_request.id)
+
+ assert resource_allocation_manager.get_active_request(active_request.id).status == (
+ ResourceRequestAllocationStatus.ABORTED
+ )
+
+ assert not resource_allocation_manager._device_allocation_manager.is_allocated(LAB_ID, "magnetic_mixer")
+ assert not resource_allocation_manager._device_allocation_manager.is_allocated(LAB_ID, "magnetic_mixer_2")
+
+ def test_get_all_active_requests(self, resource_allocation_manager):
+ requests = [
+ ResourceAllocationRequest(
+ requester=f"test_requester{i}",
+ reason="Needed for experiment",
+ experiment_id="water_purification_1",
+ )
+ for i in range(1, 3)
+ ]
+ requests[0].add_resource("magnetic_mixer", LAB_ID, ResourceType.DEVICE)
+ requests[1].add_resource("026749f8f40342b38157f9824ae2f512", "", ResourceType.CONTAINER)
+
+ for request in requests:
+ resource_allocation_manager.request_resources(request, lambda x: None)
+
+ all_active_requests = resource_allocation_manager.get_all_active_requests()
+ assert len(all_active_requests) == 2
+ assert all_active_requests[0].request == requests[0]
+ assert all_active_requests[1].request == requests[1]
+
+ def test_get_active_request_nonexistent(self, resource_allocation_manager):
+ nonexistent_id = ObjectId()
+ assert resource_allocation_manager.get_active_request(nonexistent_id) is None
+
+ def test_clean_requests(self, resource_allocation_manager):
+ request = ResourceAllocationRequest(
+ requester="test_requester",
+ reason="Needed for experiment",
+ experiment_id="water_purification_1",
+ )
+ request.add_resource("magnetic_mixer", LAB_ID, ResourceType.DEVICE)
+
+ active_request = resource_allocation_manager.request_resources(request, lambda x: None)
+ resource_allocation_manager.process_active_requests()
+ resource_allocation_manager.release_resources(active_request)
+
+ assert (
+ resource_allocation_manager.get_active_request(active_request.id).status
+ == ResourceRequestAllocationStatus.COMPLETED
+ )
+
+ resource_allocation_manager._clean_completed_and_aborted_requests()
+
+ assert len(resource_allocation_manager.get_all_active_requests()) == 0
+
+ def test_all_or_nothing_allocation(self, resource_allocation_manager):
+ request = ResourceAllocationRequest(
+ requester="test_requester",
+ reason="Needed for experiment",
+ experiment_id="water_purification_1",
+ )
+ request.add_resource("magnetic_mixer", LAB_ID, ResourceType.DEVICE)
+ request.add_resource("nonexistent_device", LAB_ID, ResourceType.DEVICE)
+
+ # Requesting a mix of valid and nonexistent resources should fail outright
+ with pytest.raises(EosDeviceNotFoundError):
+ resource_allocation_manager.request_resources(request, lambda x: None)
+ resource_allocation_manager.process_active_requests()
+
+ # Verify that the valid resource was not allocated (all-or-nothing semantics)
+ assert not resource_allocation_manager._device_allocation_manager.is_allocated(LAB_ID, "magnetic_mixer")
+
+ with pytest.raises(EosDeviceNotFoundError):
+ assert not resource_allocation_manager._device_allocation_manager.is_allocated(LAB_ID, "nonexistent_device")
diff --git a/tests/test_task_executor.py b/tests/test_task_executor.py
new file mode 100644
index 0000000..d034fe2
--- /dev/null
+++ b/tests/test_task_executor.py
@@ -0,0 +1,103 @@
+import asyncio
+
+from eos.configuration.entities.task import TaskConfig, TaskDeviceConfig
+from eos.resource_allocation.entities.resource_request import (
+ ResourceAllocationRequest,
+ ResourceType,
+)
+from eos.tasks.entities.task_execution_parameters import TaskExecutionParameters
+from eos.tasks.exceptions import EosTaskResourceAllocationError
+from tests.fixtures import *
+
+
+@pytest.mark.parametrize(
+ "setup_lab_experiment",
+ [("small_lab", "water_purification")],
+ indirect=True,
+)
+class TestTaskExecutor:
+ @pytest.mark.asyncio
+ async def test_request_task_execution(
+ self,
+ task_executor,
+ experiment_manager,
+ experiment_graph,
+ ):
+ experiment_manager.create_experiment("water_purification", "water_purification")
+
+ task_config = experiment_graph.get_task_config("mixing")
+ task_config.parameters["time"] = 5
+
+ task_parameters = TaskExecutionParameters(
+ experiment_id="water_purification",
+ devices=[TaskDeviceConfig(lab_id="small_lab", id="magnetic_mixer")],
+ task_config=task_config,
+ )
+ task_output_parameters, _, _ = await task_executor.request_task_execution(task_parameters)
+ assert task_output_parameters["mixing_time"] == 5
+
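+ # Re-running the same config under a new task id should execute a fresh, independent task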
+ task_parameters.task_config.id = "mixing2"
+ task_output_parameters, _, _ = await task_executor.request_task_execution(task_parameters)
+ assert task_output_parameters["mixing_time"] == 5
+
+ task_parameters.task_config.id = "mixing3"
+ task_output_parameters, _, _ = await task_executor.request_task_execution(task_parameters)
+ assert task_output_parameters["mixing_time"] == 5
+
+ @pytest.mark.asyncio
+ async def test_request_task_execution_resource_request_timeout(
+ self,
+ task_executor,
+ experiment_manager,
+ experiment_graph,
+ resource_allocation_manager,
+ ):
+ request = ResourceAllocationRequest(
+ requester="tester",
+ )
+ request.add_resource("magnetic_mixer", "small_lab", ResourceType.DEVICE)
+ active_request = resource_allocation_manager.request_resources(request, lambda requests: None)
+ resource_allocation_manager.process_active_requests()
+
+ experiment_manager.create_experiment("water_purification", "water_purification")
+
+ task_config = experiment_graph.get_task_config("mixing")
+ task_config.parameters["time"] = 5
+ task_parameters = TaskExecutionParameters(
+ experiment_id="water_purification",
+ devices=[TaskDeviceConfig(lab_id="small_lab", id="magnetic_mixer")],
+ task_config=task_config,
+ resource_allocation_timeout=1,
+ )
+ with pytest.raises(EosTaskResourceAllocationError):
+ await task_executor.request_task_execution(task_parameters)
+
+ resource_allocation_manager.release_resources(active_request)
+
+ @pytest.mark.asyncio
+ async def test_request_task_cancellation(self, task_executor, experiment_manager):
+ experiment_manager.create_experiment("water_purification", "water_purification")
+
+ sleep_config = TaskConfig(
+ id="sleep_task",
+ type="Sleep",
+ devices=[TaskDeviceConfig(lab_id="small_lab", id="general_computer")],
+ parameters={"sleep_time": 2},
+ )
+ task_parameters = TaskExecutionParameters(
+ experiment_id="water_purification",
+ task_config=sleep_config,
+ )
+
+ # Keep a reference to the background task so it is not garbage-collected mid-test
+ tasks = set()
+
+ task = asyncio.create_task(task_executor.request_task_execution(task_parameters))
+ tasks.add(task)
+ await asyncio.sleep(1)
+
+ await task_executor.request_task_cancellation(task_parameters.experiment_id, task_parameters.task_config.id)
+
+ # The test passes if the cancellation request completes without raising
+ assert True
diff --git a/tests/test_task_input_parameter_validator.py b/tests/test_task_input_parameter_validator.py
new file mode 100644
index 0000000..75b1d29
--- /dev/null
+++ b/tests/test_task_input_parameter_validator.py
@@ -0,0 +1,131 @@
+import pytest
+from omegaconf import DictConfig
+
+from eos.configuration.entities.parameters import ParameterType
+from eos.configuration.entities.task import TaskConfig
+from eos.configuration.entities.task_specification import TaskSpecification
+from eos.tasks.exceptions import EosTaskValidationError
+from eos.tasks.task_input_parameter_validator import TaskInputParameterValidator
+
+
+class TestTaskInputParameterValidator:
+ @pytest.fixture
+ def task_spec(self):
+ return TaskSpecification(
+ type="test_task",
+ description="A test task",
+ input_parameters={
+ "integer_param": DictConfig(
+ {"type": "integer", "unit": "n/a", "description": "An integer parameter", "min": 0, "max": 100}
+ ),
+ "decimal_param": DictConfig(
+ {"type": "decimal", "unit": "n/a", "description": "A float parameter", "min": 0.0, "max": 1.0}
+ ),
+ "string_param": DictConfig({"type": "string", "description": "A string parameter"}),
+ "boolean_param": DictConfig({"type": "boolean", "value": False, "description": "A boolean parameter"}),
+ "list_param": DictConfig(
+ {"type": "list", "description": "A list parameter", "element_type": "integer", "length": 3}
+ ),
+ "dictionary_param": DictConfig({"type": "dictionary", "description": "A dictionary parameter"}),
+ "choice_param": DictConfig(
+ {"type": "choice", "value": "A", "description": "A choice parameter", "choices": ["A", "B", "C"]}
+ ),
+ },
+ )
+
+ @pytest.fixture
+ def task_config(self, task_spec):
+ return TaskConfig(
+ id="test_task_1",
+ type="test_task",
+ parameters={
+ "integer_param": 50,
+ "decimal_param": 0.5,
+ "string_param": "test",
+ "boolean_param": True,
+ "list_param": [1, 2, 3],
+ "dictionary_param": {"key": "value"},
+ "choice_param": "A",
+ },
+ )
+
+ @pytest.fixture
+ def validator(self, task_config, task_spec):
+ return TaskInputParameterValidator(task_config, task_spec)
+
+ def test_valid_input_parameters(self, validator):
+ validator.validate_input_parameters() # Should not raise any exceptions
+
+ @pytest.mark.parametrize(
+ ("param_name", "invalid_value"),
+ [
+ ("integer_param", "not_an_int"),
+ ("decimal_param", "not_a_float"),
+ ("boolean_param", "not_a_bool"),
+ ("list_param", "not_a_list"),
+ ("dictionary_param", "not_a_dict"),
+ ("choice_param", "D"),
+ ],
+ )
+ def test_invalid_input_parameters(self, validator, task_config, param_name, invalid_value):
+ task_config.parameters[param_name] = invalid_value
+ with pytest.raises(EosTaskValidationError):
+ validator.validate_input_parameters()
+
+ def test_missing_required_parameter(self, validator, task_config):
+ del task_config.parameters["integer_param"]
+ with pytest.raises(EosTaskValidationError):
+ validator.validate_input_parameters()
+
+ def test_extra_parameter(self, validator, task_config):
+ task_config.parameters["extra_param"] = "extra"
+ with pytest.raises(EosTaskValidationError):
+ validator.validate_input_parameters()
+
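+ # The validator is expected to coerce string representations (e.g. "50", "true") into the declared type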
+ @pytest.mark.parametrize(
+ ("param_type", "valid_values", "invalid_values"),
+ [
+ (ParameterType.integer, [0, 50, 100, "50"], [-1, 101, "fifty"]),
+ (ParameterType.decimal, [0.0, 0.5, 1.0, "0.5"], [-0.1, 1.1, "half"]),
+ (ParameterType.boolean, [True, False, "true", "false"], ["yes", "no", 2]),
+ (ParameterType.string, ["test", "123", ""], []),
+ (ParameterType.list, [[1, 2, 3], [1, 2, 62]], [[1, 2], [1, 2, 3, 4], "not_a_list"]),
+ (ParameterType.dictionary, [{"key": "value"}, {}], ["not_a_dict", [1, 2, 3]]),
+ (ParameterType.choice, ["A", "B", "C"], ["D", 1, True]),
+ ],
+ )
+ def test_parameter_type_conversion(
+ self, validator, task_config, task_spec, param_type, valid_values, invalid_values
+ ):
+ param_name = f"{param_type.value}_param"
+ task_spec.input_parameters[param_name]["type"] = param_type.value
+ if param_type == ParameterType.choice:
+ task_spec.input_parameters[param_name]["choices"] = ["A", "B", "C"]
+ elif param_type == ParameterType.list:
+ task_spec.input_parameters[param_name]["element_type"] = "integer"
+ task_spec.input_parameters[param_name]["length"] = 3
+
+ for valid_value in valid_values:
+ task_config.parameters[param_name] = valid_value
+ validator.validate_input_parameters() # Should not raise any exceptions
+
+ for invalid_value in invalid_values:
+ task_config.parameters[param_name] = invalid_value
+ with pytest.raises(EosTaskValidationError):
+ validator.validate_input_parameters()
+
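+ # "$.some_reference" and "eos_dynamic" look like reference/dynamic-parameter placeholders, which are presumably rejected as concrete input values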
+ @pytest.mark.parametrize(
+ ("param_name", "invalid_value", "expected_error"),
+ [
+ ("integer_param", "$.some_reference", EosTaskValidationError),
+ ("integer_param", "eos_dynamic", EosTaskValidationError),
+ ("integer_param", 150, EosTaskValidationError),
+ ("list_param", [1, 2, 3, 4], EosTaskValidationError),
+ ],
+ )
+ def test_specific_validation_cases(self, validator, task_config, param_name, invalid_value, expected_error):
+ task_config.parameters[param_name] = invalid_value
+ with pytest.raises(expected_error):
+ validator.validate_input_parameters()
diff --git a/tests/test_task_manager.py b/tests/test_task_manager.py
new file mode 100644
index 0000000..2b74b44
--- /dev/null
+++ b/tests/test_task_manager.py
@@ -0,0 +1,113 @@
+from eos.tasks.entities.task import TaskStatus, TaskOutput
+from eos.tasks.exceptions import EosTaskStateError, EosTaskExistsError
+from tests.fixtures import *
+
+EXPERIMENT_ID = "water_purification"
+
+
+@pytest.fixture
+def experiment_manager(configuration_manager, db_manager):
+ experiment_manager = ExperimentManager(configuration_manager, db_manager)
+ experiment_manager.create_experiment(EXPERIMENT_ID, "water_purification")
+ return experiment_manager
+
+
+@pytest.mark.parametrize("setup_lab_experiment", [("small_lab", "water_purification")], indirect=True)
+class TestTaskManager:
+ def test_create_task(self, task_manager, experiment_manager):
+ task_manager.create_task(EXPERIMENT_ID, "mixing", "Magnetic Mixing", [])
+
+ task = task_manager.get_task(EXPERIMENT_ID, "mixing")
+ assert task.id == "mixing"
+ assert task.type == "Magnetic Mixing"
+
+ def test_create_task_nonexistent_experiment(self, task_manager, experiment_manager):
+ with pytest.raises(EosTaskStateError):
+ task_manager.create_task("nonexistent_experiment", "mixing", "Magnetic Mixing", [])
+
+ def test_create_task_nonexistent_task_type(self, task_manager, experiment_manager):
+ with pytest.raises(EosTaskStateError):
+ task_manager.create_task(EXPERIMENT_ID, "nonexistent_task", "Nonexistent", [])
+
+ def test_create_existing_task(self, task_manager, experiment_manager):
+ task_manager.create_task(EXPERIMENT_ID, "mixing", "Magnetic Mixing", [])
+
+ with pytest.raises(EosTaskExistsError):
+ task_manager.create_task(EXPERIMENT_ID, "mixing", "Magnetic Mixing", [])
+
+ def test_delete_task(self, task_manager):
+ task_manager.create_task(EXPERIMENT_ID, "mixing", "Magnetic Mixing", [])
+
+ task_manager.delete_task(EXPERIMENT_ID, "mixing")
+
+ assert task_manager.get_task(EXPERIMENT_ID, "mixing") is None
+
+ def test_delete_nonexistent_task(self, task_manager, experiment_manager):
+ with pytest.raises(EosTaskStateError):
+ task_manager.delete_task(EXPERIMENT_ID, "nonexistent_task")
+
+ def test_get_all_tasks_by_status(self, task_manager, experiment_manager):
+ task_manager.create_task(EXPERIMENT_ID, "mixing", "Magnetic Mixing", [])
+ task_manager.create_task(EXPERIMENT_ID, "purification", "Purification", [])
+
+ task_manager.start_task(EXPERIMENT_ID, "mixing")
+ task_manager.complete_task(EXPERIMENT_ID, "purification")
+
+ assert len(task_manager.get_tasks(experiment_id=EXPERIMENT_ID, status=TaskStatus.RUNNING.value)) == 1
+ assert len(task_manager.get_tasks(experiment_id=EXPERIMENT_ID, status=TaskStatus.COMPLETED.value)) == 1
+
+ def test_set_task_status(self, task_manager, experiment_manager):
+ task_manager.create_task(EXPERIMENT_ID, "mixing", "Magnetic Mixing", [])
+
+ assert task_manager.get_task(EXPERIMENT_ID, "mixing").status == TaskStatus.CREATED
+
+ task_manager.start_task(EXPERIMENT_ID, "mixing")
+ assert task_manager.get_task(EXPERIMENT_ID, "mixing").status == TaskStatus.RUNNING
+
+ task_manager.complete_task(EXPERIMENT_ID, "mixing")
+ assert task_manager.get_task(EXPERIMENT_ID, "mixing").status == TaskStatus.COMPLETED
+
+ def test_set_task_status_nonexistent_task(self, task_manager, experiment_manager):
+ with pytest.raises(EosTaskStateError):
+ task_manager.start_task(EXPERIMENT_ID, "nonexistent_task")
+
+ def test_start_task(self, task_manager, experiment_manager):
+ task_manager.create_task(EXPERIMENT_ID, "mixing", "Magnetic Mixing", [])
+
+ task_manager.start_task(EXPERIMENT_ID, "mixing")
+ assert "mixing" in experiment_manager.get_running_tasks(EXPERIMENT_ID)
+
+ def test_start_nonexistent_task(self, task_manager, experiment_manager):
+ with pytest.raises(EosTaskStateError):
+ task_manager.start_task(EXPERIMENT_ID, "nonexistent_task")
+
+ def test_complete_task(self, task_manager, experiment_manager):
+ task_manager.create_task(EXPERIMENT_ID, "mixing", "Magnetic Mixing", [])
+ task_manager.start_task(EXPERIMENT_ID, "mixing")
+ task_manager.complete_task(EXPERIMENT_ID, "mixing")
+ assert "mixing" not in experiment_manager.get_running_tasks(EXPERIMENT_ID)
+ assert "mixing" in experiment_manager.get_completed_tasks(EXPERIMENT_ID)
+
+ def test_complete_nonexistent_task(self, task_manager, experiment_manager):
+ with pytest.raises(EosTaskStateError):
+ task_manager.complete_task(EXPERIMENT_ID, "nonexistent_task")
+
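+ # A task output bundles output parameters with named files, which are stored and retrieved separately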
+ def test_add_task_output(self, task_manager):
+ task_manager.create_task(EXPERIMENT_ID, "mixing", "Magnetic Mixing", [])
+
+ task_output = TaskOutput(
+ experiment_id=EXPERIMENT_ID,
+ task_id="mixing",
+ parameters={"x": 5},
+ file_names=["file"],
+ )
+ task_manager.add_task_output(EXPERIMENT_ID, "mixing", task_output)
+ task_manager.add_task_output_file(EXPERIMENT_ID, "mixing", "file", b"file_data")
+
+ output = task_manager.get_task_output(experiment_id=EXPERIMENT_ID, task_id="mixing")
+ assert output.parameters == {"x": 5}
+ assert output.file_names == ["file"]
+
+ output_file = task_manager.get_task_output_file(experiment_id=EXPERIMENT_ID, task_id="mixing", file_name="file")
+ assert output_file == b"file_data"
diff --git a/tests/test_task_specification_validation.py b/tests/test_task_specification_validation.py
new file mode 100644
index 0000000..8561c3a
--- /dev/null
+++ b/tests/test_task_specification_validation.py
@@ -0,0 +1,255 @@
+from eos.configuration.entities.parameters import (
+ ParameterFactory,
+ ParameterType,
+)
+from eos.configuration.entities.task_specification import (
+ TaskSpecificationOutputParameter,
+ TaskSpecification,
+)
+from eos.configuration.exceptions import EosConfigurationError
+from tests.fixtures import *
+
+
+class TestTaskSpecifications:
+ def test_invalid_parameter_type(self):
+ with pytest.raises(ValueError):
+ ParameterFactory.create_parameter(
+ "invalid_type",
+ value=120,
+ description="Duration of evaporation in seconds.",
+ )
+
+ def test_numeric_parameter_unit_not_specified(self):
+ with pytest.raises(EosConfigurationError):
+ ParameterFactory.create_parameter(
+ ParameterType.integer,
+ unit="",
+ value=120,
+ min=60,
+ description="Duration of evaporation in seconds.",
+ )
+
+ def test_numeric_parameter_value_not_numeric(self):
+ with pytest.raises(EosConfigurationError):
+ ParameterFactory.create_parameter(
+ ParameterType.integer,
+ unit="sec",
+ value="not_a_number",
+ min=60,
+ description="Duration of evaporation in seconds.",
+ )
+
+ def test_numeric_parameter_min_greater_than_max(self):
+ with pytest.raises(EosConfigurationError):
+ ParameterFactory.create_parameter(
+ ParameterType.integer,
+ unit="sec",
+ value=120,
+ min=300,
+ max=60,
+ description="Duration of evaporation in seconds.",
+ )
+
+ def test_numeric_parameter_out_of_range_min(self):
+ with pytest.raises(EosConfigurationError):
+ ParameterFactory.create_parameter(
+ ParameterType.integer,
+ unit="sec",
+ value=5,
+ min=60,
+ max=300,
+ description="Duration of evaporation in seconds.",
+ )
+
+ def test_numeric_parameter_out_of_range_max(self):
+ with pytest.raises(EosConfigurationError):
+ ParameterFactory.create_parameter(
+ ParameterType.integer,
+ unit="sec",
+ value=100,
+ min=0,
+ max=80,
+ description="Duration of evaporation in seconds.",
+ )
+
+ def test_boolean_parameter_invalid_value(self):
+ with pytest.raises(EosConfigurationError):
+ ParameterFactory.create_parameter(
+ ParameterType.boolean,
+ value="not_a_boolean",
+ description="Whether to sparge the evaporation vessel with nitrogen.",
+ )
+
+ def test_choice_parameter_choices_not_specified(self):
+ with pytest.raises(EosConfigurationError):
+ ParameterFactory.create_parameter(
+ ParameterType.choice,
+ choices=[],
+ value="method1",
+ description="Method to use",
+ )
+
+ def test_choice_parameter_no_value(self):
+ with pytest.raises(EosConfigurationError):
+ ParameterFactory.create_parameter(
+ ParameterType.choice,
+ choices=["method1", "method2"],
+ value=None,
+ description="Method to use",
+ )
+
+ def test_choice_parameter_invalid_value(self):
+ with pytest.raises(EosConfigurationError):
+ ParameterFactory.create_parameter(
+ ParameterType.choice,
+ choices=["method1", "method2"],
+ value="invalid_method",
+ description="Method to use",
+ )
+
+ def test_list_parameter_invalid_element_type(self):
+ with pytest.raises(EosConfigurationError):
+ ParameterFactory.create_parameter(
+ ParameterType.list,
+ length=3,
+ element_type="invalid_type",
+ value=[1, 2, 3],
+ description="List of elements",
+ )
+
+ def test_list_parameter_nested_list(self):
+ with pytest.raises(EosConfigurationError):
+ ParameterFactory.create_parameter(
+ ParameterType.list,
+ length=3,
+ element_type="list",
+ value=[[1], [2], [3]],
+ description="List of elements",
+ )
+
+ def test_list_parameter_invalid_value(self):
+ with pytest.raises(EosConfigurationError):
+ ParameterFactory.create_parameter(
+ ParameterType.list,
+ length=3,
+ element_type="integer",
+ value=4,
+ description="List of elements",
+ )
+
+ def test_list_parameter_elements_not_same_type(self):
+ with pytest.raises(EosConfigurationError):
+ ParameterFactory.create_parameter(
+ ParameterType.list,
+ length=3,
+ element_type="integer",
+ value=[1, True, "3"],
+ description="List of elements",
+ )
+
+ def test_list_parameter_invalid_value_element_size(self):
+ with pytest.raises(EosConfigurationError):
+ ParameterFactory.create_parameter(
+ ParameterType.list,
+ length=3,
+ element_type="integer",
+ value=[1, 2],
+ description="List of elements",
+ )
+
+ def test_list_parameter_invalid_value_element_min(self):
+ with pytest.raises(EosConfigurationError):
+ ParameterFactory.create_parameter(
+ ParameterType.list,
+ length=3,
+ element_type="integer",
+ value=[1, 2, 3],
+ min=[2, 2, "INVALID"],
+ description="List of elements",
+ )
+
+ def test_list_parameter_invalid_value_element_max(self):
+ with pytest.raises(EosConfigurationError):
+ ParameterFactory.create_parameter(
+ ParameterType.list,
+ length=3,
+ element_type="integer",
+ value=[1, 2, 3],
+ max=[2, 2, "INVALID"],
+ description="List of elements",
+ )
+
+ def test_list_parameter_value_less_than_min(self):
+ with pytest.raises(EosConfigurationError):
+ ParameterFactory.create_parameter(
+ ParameterType.list,
+ length=3,
+ element_type="integer",
+ value=[2, 2, 2],
+ min=[2, 2, 3],
+ description="List of elements",
+ )
+
+ def test_list_parameter_value_greater_than_max(self):
+ with pytest.raises(EosConfigurationError):
+ ParameterFactory.create_parameter(
+ ParameterType.list,
+ length=3,
+ element_type="integer",
+ value=[2, 2, 2],
+ max=[2, 2, 1],
+ description="List of elements",
+ )
+
+ def test_list_parameter_invalid_min_max_size(self):
+ with pytest.raises(EosConfigurationError):
+ ParameterFactory.create_parameter(
+ ParameterType.list,
+ length=3,
+ element_type="integer",
+ value=[2, 2, 2],
+ min=[2, 2],
+ description="List of elements",
+ )
+
+ with pytest.raises(EosConfigurationError):
+ ParameterFactory.create_parameter(
+ ParameterType.list,
+ length=3,
+ element_type="integer",
+ value=[2, 2, 2],
+ max=[2, 2],
+ description="List of elements",
+ )
+
+ def test_parameter_invalid_name(self, configuration_manager):
+ task_specs = configuration_manager.task_specs
+
+ task_spec = task_specs.get_spec_by_type("Magnetic Mixing")
+
+ task_spec.input_parameters["invalid_name*"] = {
+ "type": "integer",
+ "unit": "sec",
+ "value": 120,
+ "description": "Duration of evaporation in seconds.",
+ }
+
+ with pytest.raises(EosConfigurationError):
+ TaskSpecification(**task_spec)
+
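+ # Output parameters mirror the input rules: numeric types require a unit, non-numeric types must not have one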
+ def test_output_numeric_parameter_unit_not_specified(self, configuration_manager):
+ with pytest.raises(EosConfigurationError):
+ TaskSpecificationOutputParameter(
+ type=ParameterType.integer,
+ unit="",
+ description="Duration of evaporation in seconds.",
+ )
+
+ def test_output_non_numeric_parameter_unit_specified(self, configuration_manager):
+ with pytest.raises(EosConfigurationError):
+ TaskSpecificationOutputParameter(
+ type=ParameterType.boolean,
+ unit="sec",
+ description="Whether to sparge the evaporation vessel with nitrogen.",
+ )
diff --git a/tests/user/testing/common/__init__.py b/tests/user/testing/common/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/user/testing/devices/abstract_lab/DT1/device.py b/tests/user/testing/devices/abstract_lab/DT1/device.py
new file mode 100644
index 0000000..611920d
--- /dev/null
+++ b/tests/user/testing/devices/abstract_lab/DT1/device.py
@@ -0,0 +1,14 @@
+from typing import Any
+
+from eos.devices.base_device import BaseDevice
+
+
+class DT1Device(BaseDevice):
+ def _initialize(self, initialization_parameters: dict[str, Any]) -> None:
+ pass
+
+ def _cleanup(self) -> None:
+ pass
+
+ def _report(self) -> dict[str, Any]:
+ return {}
diff --git a/tests/user/testing/devices/abstract_lab/DT1/device.yml b/tests/user/testing/devices/abstract_lab/DT1/device.yml
new file mode 100644
index 0000000..2fd54c5
--- /dev/null
+++ b/tests/user/testing/devices/abstract_lab/DT1/device.yml
@@ -0,0 +1,2 @@
+type: DT1
+description: An abstract device for testing
diff --git a/tests/user/testing/devices/abstract_lab/DT2/device.py b/tests/user/testing/devices/abstract_lab/DT2/device.py
new file mode 100644
index 0000000..ba688c5
--- /dev/null
+++ b/tests/user/testing/devices/abstract_lab/DT2/device.py
@@ -0,0 +1,14 @@
+from typing import Any
+
+from eos.devices.base_device import BaseDevice
+
+
+class DT2Device(BaseDevice):
+ def _initialize(self, initialization_parameters: dict[str, Any]) -> None:
+ pass
+
+ def _cleanup(self) -> None:
+ pass
+
+ def _report(self) -> dict[str, Any]:
+ return {}
diff --git a/tests/user/testing/devices/abstract_lab/DT2/device.yml b/tests/user/testing/devices/abstract_lab/DT2/device.yml
new file mode 100644
index 0000000..758cf63
--- /dev/null
+++ b/tests/user/testing/devices/abstract_lab/DT2/device.yml
@@ -0,0 +1,2 @@
+type: DT2
+description: An abstract device for testing
diff --git a/tests/user/testing/devices/abstract_lab/DT3/device.py b/tests/user/testing/devices/abstract_lab/DT3/device.py
new file mode 100644
index 0000000..04ebc26
--- /dev/null
+++ b/tests/user/testing/devices/abstract_lab/DT3/device.py
@@ -0,0 +1,14 @@
+from typing import Any
+
+from eos.devices.base_device import BaseDevice
+
+
+class DT3Device(BaseDevice):
+ def _initialize(self, initialization_parameters: dict[str, Any]) -> None:
+ pass
+
+ def _cleanup(self) -> None:
+ pass
+
+ def _report(self) -> dict[str, Any]:
+ return {}
diff --git a/tests/user/testing/devices/abstract_lab/DT3/device.yml b/tests/user/testing/devices/abstract_lab/DT3/device.yml
new file mode 100644
index 0000000..52e952d
--- /dev/null
+++ b/tests/user/testing/devices/abstract_lab/DT3/device.yml
@@ -0,0 +1,2 @@
+type: DT3
+description: An abstract device for testing
diff --git a/tests/user/testing/devices/abstract_lab/DT4/device.py b/tests/user/testing/devices/abstract_lab/DT4/device.py
new file mode 100644
index 0000000..5eaecef
--- /dev/null
+++ b/tests/user/testing/devices/abstract_lab/DT4/device.py
@@ -0,0 +1,14 @@
+from typing import Any
+
+from eos.devices.base_device import BaseDevice
+
+
+class DT4Device(BaseDevice):
+ def _initialize(self, initialization_parameters: dict[str, Any]) -> None:
+ pass
+
+ def _cleanup(self) -> None:
+ pass
+
+ def _report(self) -> dict[str, Any]:
+ return {}
diff --git a/tests/user/testing/devices/abstract_lab/DT4/device.yml b/tests/user/testing/devices/abstract_lab/DT4/device.yml
new file mode 100644
index 0000000..2257574
--- /dev/null
+++ b/tests/user/testing/devices/abstract_lab/DT4/device.yml
@@ -0,0 +1,2 @@
+type: DT4
+description: An abstract device for testing
diff --git a/tests/user/testing/devices/abstract_lab/DT5/device.py b/tests/user/testing/devices/abstract_lab/DT5/device.py
new file mode 100644
index 0000000..6ab25e4
--- /dev/null
+++ b/tests/user/testing/devices/abstract_lab/DT5/device.py
@@ -0,0 +1,14 @@
+from typing import Any
+
+from eos.devices.base_device import BaseDevice
+
+
+class DT5Device(BaseDevice):
+ def _initialize(self, initialization_parameters: dict[str, Any]) -> None:
+ pass
+
+ def _cleanup(self) -> None:
+ pass
+
+ def _report(self) -> dict[str, Any]:
+ return {}
diff --git a/tests/user/testing/devices/abstract_lab/DT5/device.yml b/tests/user/testing/devices/abstract_lab/DT5/device.yml
new file mode 100644
index 0000000..47bc540
--- /dev/null
+++ b/tests/user/testing/devices/abstract_lab/DT5/device.yml
@@ -0,0 +1,2 @@
+type: DT5
+description: An abstract device for testing
diff --git a/tests/user/testing/devices/abstract_lab/DT6/device.py b/tests/user/testing/devices/abstract_lab/DT6/device.py
new file mode 100644
index 0000000..ac2ab08
--- /dev/null
+++ b/tests/user/testing/devices/abstract_lab/DT6/device.py
@@ -0,0 +1,14 @@
+from typing import Any
+
+from eos.devices.base_device import BaseDevice
+
+
+class DT6Device(BaseDevice):
+ def _initialize(self, initialization_parameters: dict[str, Any]) -> None:
+ pass
+
+ def _cleanup(self) -> None:
+ pass
+
+ def _report(self) -> dict[str, Any]:
+ return {}
diff --git a/tests/user/testing/devices/abstract_lab/DT6/device.yml b/tests/user/testing/devices/abstract_lab/DT6/device.yml
new file mode 100644
index 0000000..32f8009
--- /dev/null
+++ b/tests/user/testing/devices/abstract_lab/DT6/device.yml
@@ -0,0 +1,2 @@
+type: DT6
+description: An abstract device for testing
diff --git a/tests/user/testing/devices/multiplication_lab/analyzer/device.py b/tests/user/testing/devices/multiplication_lab/analyzer/device.py
new file mode 100644
index 0000000..0bbbd3f
--- /dev/null
+++ b/tests/user/testing/devices/multiplication_lab/analyzer/device.py
@@ -0,0 +1,17 @@
+from typing import Any
+
+from eos.devices.base_device import BaseDevice
+
+
+class AnalyzerDevice(BaseDevice):
+ def _initialize(self, initialization_parameters: dict[str, Any]) -> None:
+ pass
+
+ def _cleanup(self) -> None:
+ pass
+
+ def _report(self) -> dict[str, Any]:
+ return {}
+
+ def analyze_result(self, number: int, product: int) -> int:
+ return number + 100 * abs(product - 1024)
diff --git a/tests/user/testing/devices/multiplication_lab/analyzer/device.yml b/tests/user/testing/devices/multiplication_lab/analyzer/device.yml
new file mode 100644
index 0000000..42333c7
--- /dev/null
+++ b/tests/user/testing/devices/multiplication_lab/analyzer/device.yml
@@ -0,0 +1,2 @@
+type: analyzer
+description: A device for analyzing the result of the multiplication of two numbers
diff --git a/tests/user/testing/devices/multiplication_lab/multiplier/device.py b/tests/user/testing/devices/multiplication_lab/multiplier/device.py
new file mode 100644
index 0000000..33ed2ba
--- /dev/null
+++ b/tests/user/testing/devices/multiplication_lab/multiplier/device.py
@@ -0,0 +1,17 @@
+from typing import Any
+
+from eos.devices.base_device import BaseDevice
+
+
+class MultiplierDevice(BaseDevice):
+ def _initialize(self, initialization_parameters: dict[str, Any]) -> None:
+ pass
+
+ def _cleanup(self) -> None:
+ pass
+
+ def _report(self) -> dict[str, Any]:
+ return {}
+
+ def multiply(self, a: int, b: int) -> int:
+ return a * b
diff --git a/tests/user/testing/devices/multiplication_lab/multiplier/device.yml b/tests/user/testing/devices/multiplication_lab/multiplier/device.yml
new file mode 100644
index 0000000..efe0ee4
--- /dev/null
+++ b/tests/user/testing/devices/multiplication_lab/multiplier/device.yml
@@ -0,0 +1,2 @@
+type: multiplier
+description: A device for multiplying two numbers
diff --git a/tests/user/testing/devices/small_lab/computer/device.py b/tests/user/testing/devices/small_lab/computer/device.py
new file mode 100644
index 0000000..5fd44c4
--- /dev/null
+++ b/tests/user/testing/devices/small_lab/computer/device.py
@@ -0,0 +1,14 @@
+from typing import Any
+
+from eos.devices.base_device import BaseDevice
+
+
+class ComputerDevice(BaseDevice):
+ def _initialize(self, initialization_parameters: dict[str, Any]) -> None:
+ pass
+
+ def _cleanup(self) -> None:
+ pass
+
+ def _report(self) -> dict[str, Any]:
+ return {}
diff --git a/tests/user/testing/devices/small_lab/computer/device.yml b/tests/user/testing/devices/small_lab/computer/device.yml
new file mode 100644
index 0000000..40c6f12
--- /dev/null
+++ b/tests/user/testing/devices/small_lab/computer/device.yml
@@ -0,0 +1,2 @@
+type: computer
+description: General-purpose computer
diff --git a/tests/user/testing/devices/small_lab/evaporator/device.py b/tests/user/testing/devices/small_lab/evaporator/device.py
new file mode 100644
index 0000000..ceec58c
--- /dev/null
+++ b/tests/user/testing/devices/small_lab/evaporator/device.py
@@ -0,0 +1,14 @@
+from typing import Any
+
+from eos.devices.base_device import BaseDevice
+
+
+class EvaporatorDevice(BaseDevice):
+ def _initialize(self, initialization_parameters: dict[str, Any]) -> None:
+ pass
+
+ def _cleanup(self) -> None:
+ pass
+
+ def _report(self) -> dict[str, Any]:
+ return {}
diff --git a/tests/user/testing/devices/small_lab/evaporator/device.yml b/tests/user/testing/devices/small_lab/evaporator/device.yml
new file mode 100644
index 0000000..e2dd67b
--- /dev/null
+++ b/tests/user/testing/devices/small_lab/evaporator/device.yml
@@ -0,0 +1,2 @@
+type: evaporator
+description: Evaporator for substance purification
diff --git a/tests/user/testing/devices/small_lab/fridge/device.py b/tests/user/testing/devices/small_lab/fridge/device.py
new file mode 100644
index 0000000..e818aad
--- /dev/null
+++ b/tests/user/testing/devices/small_lab/fridge/device.py
@@ -0,0 +1,14 @@
+from typing import Any
+
+from eos.devices.base_device import BaseDevice
+
+
+class FridgeDevice(BaseDevice):
+ def _initialize(self, initialization_parameters: dict[str, Any]) -> None:
+ pass
+
+ def _cleanup(self) -> None:
+ pass
+
+ def _report(self) -> dict[str, Any]:
+ return {}
diff --git a/tests/user/testing/devices/small_lab/fridge/device.yml b/tests/user/testing/devices/small_lab/fridge/device.yml
new file mode 100644
index 0000000..8acfc81
--- /dev/null
+++ b/tests/user/testing/devices/small_lab/fridge/device.yml
@@ -0,0 +1,2 @@
+type: fridge
+description: Fridge for storing temperature-sensitive substances
diff --git a/tests/user/testing/devices/small_lab/magnetic_mixer/device.py b/tests/user/testing/devices/small_lab/magnetic_mixer/device.py
new file mode 100644
index 0000000..68f2cae
--- /dev/null
+++ b/tests/user/testing/devices/small_lab/magnetic_mixer/device.py
@@ -0,0 +1,14 @@
+from typing import Any
+
+from eos.devices.base_device import BaseDevice
+
+
+class MagneticMixerDevice(BaseDevice):
+ def _initialize(self, initialization_parameters: dict[str, Any]) -> None:
+ pass
+
+ def _cleanup(self) -> None:
+ pass
+
+ def _report(self) -> dict[str, Any]:
+ return {}
diff --git a/tests/user/testing/devices/small_lab/magnetic_mixer/device.yml b/tests/user/testing/devices/small_lab/magnetic_mixer/device.yml
new file mode 100644
index 0000000..f3f3128
--- /dev/null
+++ b/tests/user/testing/devices/small_lab/magnetic_mixer/device.yml
@@ -0,0 +1,2 @@
+type: magnetic_mixer
+description: Magnetic mixer for mixing substances
diff --git a/tests/user/testing/experiments/abstract_experiment/experiment.yml b/tests/user/testing/experiments/abstract_experiment/experiment.yml
new file mode 100644
index 0000000..cac30cc
--- /dev/null
+++ b/tests/user/testing/experiments/abstract_experiment/experiment.yml
@@ -0,0 +1,63 @@
+type: abstract_experiment
+description: An abstract experiment for testing
+
+labs:
+ - abstract_lab
+
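+# Task dependency graph under test:
+#   A -> {B, C, D}; B -> E; C -> F; {D, E, F} -> G; G -> H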
+tasks:
+ - id: A
+ type: Noop
+ devices:
+ - lab_id: abstract_lab
+ id: D2
+
+ - id: B
+ type: Noop
+ dependencies: [ "A" ]
+ devices:
+ - lab_id: abstract_lab
+ id: D1
+
+ - id: C
+ type: Noop
+ dependencies: [ "A" ]
+ devices:
+ - lab_id: abstract_lab
+ id: D3
+
+ - id: D
+ type: Noop
+ dependencies: [ "A" ]
+ devices:
+ - lab_id: abstract_lab
+ id: D1
+
+ - id: E
+ type: Noop
+ dependencies: [ "B" ]
+ devices:
+ - lab_id: abstract_lab
+ id: D3
+
+ - id: F
+ type: Noop
+ dependencies: [ "C" ]
+ devices:
+ - lab_id: abstract_lab
+ id: D2
+
+ - id: G
+ type: Noop
+ dependencies: [ "D", "E", "F" ]
+ devices:
+ - lab_id: abstract_lab
+ id: D5
+
+ - id: H
+ type: Noop
+ dependencies: [ "G" ]
+ devices:
+ - lab_id: abstract_lab
+ id: D6
diff --git a/tests/user/testing/experiments/optimize_multiplication/experiment.yml b/tests/user/testing/experiments/optimize_multiplication/experiment.yml
new file mode 100644
index 0000000..8322acb
--- /dev/null
+++ b/tests/user/testing/experiments/optimize_multiplication/experiment.yml
@@ -0,0 +1,35 @@
+type: optimize_multiplication
+description: An experiment for finding the smallest number that, when multiplied by two factors, yields 1024
+
+labs:
+ - multiplication_lab
+
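+# eos_dynamic marks parameters resolved at runtime (here proposed by the campaign
+# optimizer); references like mult_1.product consume another task's output.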
+tasks:
+ - id: mult_1
+ type: Multiplication
+ devices:
+ - lab_id: multiplication_lab
+ id: multiplier
+ parameters:
+ number: eos_dynamic
+ factor: eos_dynamic
+ - id: mult_2
+ type: Multiplication
+ devices:
+ - lab_id: multiplication_lab
+ id: multiplier
+ dependencies: [ mult_1 ]
+ parameters:
+ number: mult_1.product
+ factor: eos_dynamic
+ - id: compute_multiplication_objective
+ type: Compute Multiplication Objective
+ devices:
+ - lab_id: multiplication_lab
+ id: analyzer
+ dependencies: [ mult_1, mult_2 ]
+ parameters:
+ number: mult_1.number
+ product: mult_2.product
diff --git a/tests/user/testing/experiments/optimize_multiplication/optimizer.py b/tests/user/testing/experiments/optimize_multiplication/optimizer.py
new file mode 100644
index 0000000..3db60b7
--- /dev/null
+++ b/tests/user/testing/experiments/optimize_multiplication/optimizer.py
@@ -0,0 +1,30 @@
+from bofire.data_models.acquisition_functions.acquisition_function import qNEI
+from bofire.data_models.enum import SamplingMethodEnum
+from bofire.data_models.features.continuous import ContinuousOutput
+from bofire.data_models.features.discrete import DiscreteInput
+from bofire.data_models.objectives.identity import MinimizeObjective
+
+from eos.optimization.sequential_bayesian_optimizer import BayesianSequentialOptimizer
+from eos.optimization.abstract_sequential_optimizer import AbstractSequentialOptimizer
+
+
+def eos_create_campaign_optimizer() -> tuple[dict, type[AbstractSequentialOptimizer]]:
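+ # Input keys follow the "task_id.parameter_name" convention and must match the
+ # eos_dynamic parameters in experiment.yml; the output key corresponds to the
+ # objective emitted by the Compute Multiplication Objective task.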
+ constructor_args = {
+ "inputs": [
+ DiscreteInput(key="mult_1.number", values=list(range(2, 34))),
+ DiscreteInput(key="mult_1.factor", values=list(range(2, 18))),
+ DiscreteInput(key="mult_2.factor", values=list(range(2, 18))),
+ ],
+ "outputs": [
+ ContinuousOutput(key="compute_multiplication_objective.objective", objective=MinimizeObjective(w=1.0)),
+ ],
+ "constraints": [],
+ "acquisition_function": qNEI(),
+ "num_initial_samples": 5,
+ "initial_sampling_method": SamplingMethodEnum.SOBOL,
+ }
+
+ return constructor_args, BayesianSequentialOptimizer
diff --git a/tests/user/testing/experiments/water_purification/experiment.yml b/tests/user/testing/experiments/water_purification/experiment.yml
new file mode 100644
index 0000000..6f791a1
--- /dev/null
+++ b/tests/user/testing/experiments/water_purification/experiment.yml
@@ -0,0 +1,42 @@
+type: water_purification
+description: Experiment to find best parameters for purifying water using evaporation
+
+labs:
+ - small_lab
+
+containers:
+ - id: 026749f8f40342b38157f9824ae2f512
+ metadata:
+ substance: salt_water
+
+tasks:
+ - id: mixing
+ type: Magnetic Mixing
+ devices:
+ - lab_id: small_lab
+ id: magnetic_mixer
+ description: Magnetically mix water and salt
+
+ containers:
+ beaker: 026749f8f40342b38157f9824ae2f512
+ parameters:
+ speed: 60
+ time: eos_dynamic
+
+ - id: evaporation
+ type: Purification
+ devices:
+ - lab_id: small_lab
+ id: evaporator
+ description: Purification of water using evaporation
+ dependencies: [ "mixing" ]
+
+ containers:
+ beaker: 026749f8f40342b38157f9824ae2f512
+ parameters:
+ method: evaporation
+ evaporation_time: mixing.mixing_time
+ evaporation_temperature: eos_dynamic
+ evaporation_rotation_speed: eos_dynamic
+ evaporation_sparging: true
+ evaporation_sparging_flow: eos_dynamic
diff --git a/tests/user/testing/labs/abstract_lab/abstract_lab.map b/tests/user/testing/labs/abstract_lab/abstract_lab.map
new file mode 100644
index 0000000..e69de29
diff --git a/tests/user/testing/labs/abstract_lab/lab.yml b/tests/user/testing/labs/abstract_lab/lab.yml
new file mode 100644
index 0000000..8032105
--- /dev/null
+++ b/tests/user/testing/labs/abstract_lab/lab.yml
@@ -0,0 +1,22 @@
+type: abstract_lab
+description: An abstract laboratory with abstract devices for testing
+
+devices:
+ D1:
+ type: DT1
+ computer: eos_computer
+ D2:
+ type: DT2
+ computer: eos_computer
+ D3:
+ type: DT3
+ computer: eos_computer
+ D4:
+ type: DT4
+ computer: eos_computer
+ D5:
+ type: DT5
+ computer: eos_computer
+ D6:
+ type: DT6
+ computer: eos_computer
diff --git a/tests/user/testing/labs/multiplication_lab/lab.yml b/tests/user/testing/labs/multiplication_lab/lab.yml
new file mode 100644
index 0000000..c07c0d6
--- /dev/null
+++ b/tests/user/testing/labs/multiplication_lab/lab.yml
@@ -0,0 +1,10 @@
+type: multiplication_lab
+description: An abstract laboratory for testing multiplication
+
+devices:
+ multiplier:
+ type: multiplier
+ computer: eos_computer
+ analyzer:
+ type: analyzer
+ computer: eos_computer
diff --git a/tests/user/testing/labs/multiplication_lab/multiplication.map b/tests/user/testing/labs/multiplication_lab/multiplication.map
new file mode 100644
index 0000000..e69de29
diff --git a/tests/user/testing/labs/small_lab/lab.yml b/tests/user/testing/labs/small_lab/lab.yml
new file mode 100644
index 0000000..1b6bbc0
--- /dev/null
+++ b/tests/user/testing/labs/small_lab/lab.yml
@@ -0,0 +1,133 @@
+type: small_lab
+description: A small laboratory for testing
+
+locations:
+ gc_1:
+ description: Gas Chromatography station 1
+ metadata:
+ map_coordinates: { x: 100, y: 32, rotation: 0 }
+ areas:
+ injection_port:
+ description: Injection port for the gas chromatograph
+
+ gc_2:
+ description: Gas Chromatography station 2
+ metadata:
+ map_coordinates: { x: 110, y: 32, rotation: 0 }
+ areas:
+ injection_port:
+ description: Injection port for the gas chromatograph
+
+ wafer_station:
+ description: Wafer processing station
+ metadata:
+ map_coordinates: { x: 120, y: 32, rotation: 0 }
+ areas:
+ wafer_stack:
+ description: Wafer storage
+ cartesian_robot_head:
+ description: Head of the cartesian robot that holds the wafer
+
+ mixing_station:
+ description: Station equipped with magnetic mixers for substance blending
+ metadata:
+ map_coordinates: { x: 140, y: 32, rotation: 0 }
+
+ substance_shelf:
+ description: Storage shelf for chemical substances
+ metadata:
+ map_coordinates: { x: 50, y: 10, rotation: 0 }
+
+ substance_fridge:
+ description: Refrigerated storage for temperature-sensitive substances
+ metadata:
+ map_coordinates: { x: 60, y: 10, rotation: 0 }
+
+ fetch_charging_station:
+ description: Charging station for the Fetch mobile manipulation robot
+ metadata:
+ map_coordinates: { x: 10, y: 10, rotation: 0 }
+
+devices:
+ general_computer:
+ description: General-purpose computer
+ type: computer
+ location: gc_1
+ computer: eos_computer
+
+ magnetic_mixer:
+ description: Mixer for substance blending
+ type: magnetic_mixer
+ location: mixing_station
+ computer: eos_computer
+
+ magnetic_mixer_2:
+ description: Mixer for substance blending
+ type: magnetic_mixer
+ location: mixing_station
+ computer: eos_computer
+
+ evaporator:
+ description: Evaporator for substance purification
+ type: evaporator
+ location: mixing_station
+ computer: eos_computer
+
+ substance_fridge:
+ description: Fridge for storing temperature-sensitive substances
+ type: fridge
+ location: substance_fridge
+ computer: eos_computer
+
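+# Each entry defines a batch of containers sharing a type, location, and metadata;
+# every value under "ids" is an individual container instance.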
+containers:
+ - type: beaker_250
+ location: substance_shelf
+ metadata:
+ capacity: 250
+ ids:
+ - ec1ca48cd5d14c0c8cde376476e0d98d
+ - 4d8488982b8e404c83465308f6211c25
+ - 8f55ee53aaf4429392993295476b03bc
+ - d29185534fee42749a9f13932dfcb7f2
+ - type: beaker_350
+ metadata:
+ capacity: 350
+ location: substance_shelf
+ ids:
+ - 257b4bf4f13d40a49b60cb20db6bdb8d
+ - 4803e4639b314026a68e7217c5869567
+ - ab0b94897b1e439e90446994c88f1208
+ - type: beaker_500
+ location: substance_shelf
+ metadata:
+ capacity: 500
+ ids:
+ - 026749f8f40342b38157f9824ae2f512
+ - acf829f859e04fee80d54a1ee918555d
+ - a3b958aea8bd435386cdcbab20a2d3ec
+ - 2fe219d41d55449781338ef45f7f49bc
+ - type: vial_20
+ location: substance_shelf
+ metadata:
+ capacity: 20
+ ids:
+ - 84eb17d61e884ffd9d1fdebcbad1532b
+ - daa8748a09ea4e91b32c764fa3e6a3c3
+ - d03d93b6ef114ffba7b5b217362458e4
+ - 51ba54eab0bd4fa08c7ec8dea2d52fa6
+ - e7b25d1ea6844754a55a6c4be2ebbb62
+ - 9c94fcdb276e4909aa0408e287e6986c
+ - b9a14b0e5ee24db0afdc633802698a57
+ - cb895e7a7b814bfab294be9f22a8dc2c
+ - dc8aadece2d64ea59baa1b28d1c62b7b
+ - b1f6cf664cd542e9857314f1470f9efe
+ - 3e128a03dfe44709bf6941032fe42038
+ - efb5ccbaf9b4465c90b1654fac690821
+ - type: flask_250
+ location: substance_shelf
+ metadata:
+ capacity: 250
+ ids:
+ - dd4703461198463e980de42a6034f8de
diff --git a/tests/user/testing/labs/small_lab/small_lab.map b/tests/user/testing/labs/small_lab/small_lab.map
new file mode 100644
index 0000000..e69de29
diff --git a/tests/user/testing/tasks/fridge_temperature_control/task.py b/tests/user/testing/tasks/fridge_temperature_control/task.py
new file mode 100644
index 0000000..9f03dfe
--- /dev/null
+++ b/tests/user/testing/tasks/fridge_temperature_control/task.py
@@ -0,0 +1,11 @@
+from eos.tasks.base_task import BaseTask
+
+
+class FridgeTemperatureControlTask(BaseTask):
+ def _execute(
+ self,
+ devices: BaseTask.DevicesType,
+ parameters: BaseTask.ParametersType,
+ containers: BaseTask.ContainersType,
+ ) -> BaseTask.OutputType:
+ pass
diff --git a/tests/user/testing/tasks/fridge_temperature_control/task.yml b/tests/user/testing/tasks/fridge_temperature_control/task.yml
new file mode 100644
index 0000000..c74d53e
--- /dev/null
+++ b/tests/user/testing/tasks/fridge_temperature_control/task.yml
@@ -0,0 +1,13 @@
+type: Fridge Temperature Control
+description: This task adjusts the temperature of a laboratory refrigerator to a specified target to ensure optimal storage conditions for substances that require precise temperature control.
+
+device_types:
+ - fridge
+
+input_parameters:
+ target_temperature:
+ type: integer
+ unit: celsius
+ min: -20
+ max: 10
+ description: The target temperature for the fridge.
diff --git a/tests/user/testing/tasks/gc_analysis/task.py b/tests/user/testing/tasks/gc_analysis/task.py
new file mode 100644
index 0000000..8758504
--- /dev/null
+++ b/tests/user/testing/tasks/gc_analysis/task.py
@@ -0,0 +1,11 @@
+from eos.tasks.base_task import BaseTask
+
+
+class GcAnalysisTask(BaseTask):
+ def _execute(
+ self,
+ devices: BaseTask.DevicesType,
+ parameters: BaseTask.ParametersType,
+ containers: BaseTask.ContainersType,
+ ) -> BaseTask.OutputType:
+ pass
diff --git a/tests/user/testing/tasks/gc_analysis/task.yml b/tests/user/testing/tasks/gc_analysis/task.yml
new file mode 100644
index 0000000..601ceac
--- /dev/null
+++ b/tests/user/testing/tasks/gc_analysis/task.yml
@@ -0,0 +1,50 @@
+type: GC Analysis
+description: Perform gas chromatography (GC) analysis on a sample.
+
+device_types:
+ - gas_chromatograph
+
+input_parameters:
+ injection_volume:
+ type: integer
+ unit: ul
+ min: 1
+ max: 10
+ description: The volume of the sample to be injected into the GC system.
+
+ oven_temperature_initial:
+ type: integer
+ unit: C
+ min: 40
+ max: 100
+ description: The initial temperature of the GC oven.
+
+ oven_temperature_final:
+ type: integer
+ unit: C
+ min: 150
+ max: 300
+ description: The final temperature of the GC oven; it should be higher than the initial temperature.
+
+ temperature_ramp_rate:
+ type: integer
+ unit: C/min
+ min: 1
+ max: 20
+ description: The rate at which the oven temperature increases.
+
+ carrier_gas:
+ type: string
+ description: The type of carrier gas used in the GC analysis, e.g., Helium.
+
+ flow_rate:
+ type: integer
+ unit: ml/min
+ min: 1
+ max: 5
+ description: The flow rate of the carrier gas.
+
+output_parameters:
+ result_folder_path:
+ type: string
+ description: The path to the folder containing the results of the GC analysis.
diff --git a/tests/user/testing/tasks/gc_injection/task.py b/tests/user/testing/tasks/gc_injection/task.py
new file mode 100644
index 0000000..0142ce0
--- /dev/null
+++ b/tests/user/testing/tasks/gc_injection/task.py
@@ -0,0 +1,11 @@
+from eos.tasks.base_task import BaseTask
+
+
+class GcInjectionTask(BaseTask):
+ def _execute(
+ self,
+ devices: BaseTask.DevicesType,
+ parameters: BaseTask.ParametersType,
+ containers: BaseTask.ContainersType,
+ ) -> BaseTask.OutputType:
+ pass
diff --git a/tests/user/testing/tasks/gc_injection/task.yml b/tests/user/testing/tasks/gc_injection/task.yml
new file mode 100644
index 0000000..6953701
--- /dev/null
+++ b/tests/user/testing/tasks/gc_injection/task.yml
@@ -0,0 +1,10 @@
+type: GC Injection
+description: This task uses a mobile manipulation robot to inject a sample into a gas chromatograph (GC).
+
+device_types:
+ - mobile_manipulation_robot
+
+input_parameters:
+ gc_target_name:
+ type: string
+ description: The name of the GC target as defined in the GC injection task configuration YAML file.
diff --git a/tests/user/testing/tasks/hplc_analysis/task.py b/tests/user/testing/tasks/hplc_analysis/task.py
new file mode 100644
index 0000000..2394be2
--- /dev/null
+++ b/tests/user/testing/tasks/hplc_analysis/task.py
@@ -0,0 +1,11 @@
+from eos.tasks.base_task import BaseTask
+
+
+class HplcAnalysisTask(BaseTask):
+ def _execute(
+ self,
+ devices: BaseTask.DevicesType,
+ parameters: BaseTask.ParametersType,
+ containers: BaseTask.ContainersType,
+ ) -> BaseTask.OutputType:
+ pass
diff --git a/tests/user/testing/tasks/hplc_analysis/task.yml b/tests/user/testing/tasks/hplc_analysis/task.yml
new file mode 100644
index 0000000..7ba9c26
--- /dev/null
+++ b/tests/user/testing/tasks/hplc_analysis/task.yml
@@ -0,0 +1,68 @@
+type: HPLC Analysis
+
+description: This task performs High-Performance Liquid Chromatography (HPLC) analysis on a sample to separate, identify, and quantify its chemical components.
+
+device_types:
+ - high_performance_liquid_chromatograph
+
+input_containers:
+ vial:
+ type: vial
+
+input_parameters:
+ column:
+ type: choice
+ value: C18
+ choices:
+ - C18
+ - C8
+ - HILIC
+ description: The type of HPLC column to be used for separation.
+
+ mobile_phase_a:
+ type: string
+ value: water
+ description: The first mobile phase component (usually an aqueous solvent).
+
+ mobile_phase_b:
+ type: string
+ value: acetonitrile
+ description: The second mobile phase component (usually an organic solvent).
+
+ gradient:
+ type: string
+ value: "0 min: 5%B, 10 min: 95%B, 12 min: 95%B, 13 min: 5%B, 15 min: 5%B"
+ description: The gradient elution profile, specifying the change in mobile phase composition over time.
+
+ flow_rate:
+ type: decimal
+ unit: ml/min
+ value: 1.0
+ min: 0.1
+ max: 2.0
+ description: The flow rate of the mobile phase through the HPLC column.
+
+ injection_volume:
+ type: integer
+ unit: uL
+ value: 10
+ min: 1
+ max: 100
+ description: The volume of sample injected into the HPLC system.
+
+ detection_wavelength:
+ type: integer
+ unit: nm
+ value: 254
+ min: 190
+ max: 800
+ description: The wavelength at which the detector is set to monitor the eluting compounds.
+
+output_parameters:
+ peak_table_file_path:
+ type: string
+ description: Path to output file summarizing the detected peaks, their retention times, and areas.
+
+ chromatogram_file_path:
+ type: string
+ description: Path to output file of chromatogram data representing the detector response over time.
\ No newline at end of file
diff --git a/tests/user/testing/tasks/magnetic_mixing/task.py b/tests/user/testing/tasks/magnetic_mixing/task.py
new file mode 100644
index 0000000..b76877a
--- /dev/null
+++ b/tests/user/testing/tasks/magnetic_mixing/task.py
@@ -0,0 +1,15 @@
+from eos.tasks.base_task import BaseTask
+
+
+class MagneticMixingTask(BaseTask):
+ def _execute(
+ self,
+ devices: BaseTask.DevicesType,
+ parameters: BaseTask.ParametersType,
+ containers: BaseTask.ContainersType,
+ ) -> BaseTask.OutputType:
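+ # Echo the requested mixing time as an output parameter so downstream tasks
+ # can reference it (e.g. evaporation_time: mixing.mixing_time).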
+ output_parameters = {"mixing_time": parameters["time"]}
+
+ return output_parameters, None, None
diff --git a/tests/user/testing/tasks/magnetic_mixing/task.yml b/tests/user/testing/tasks/magnetic_mixing/task.yml
new file mode 100644
index 0000000..1afae50
--- /dev/null
+++ b/tests/user/testing/tasks/magnetic_mixing/task.yml
@@ -0,0 +1,31 @@
+type: Magnetic Mixing
+description: This task involves the use of a magnetic stirrer to blend multiple substances into a homogeneous mixture. Both solid and liquid forms can be mixed to produce a liquid output.
+
+device_types:
+ - magnetic_mixer
+
+input_containers:
+ beaker:
+ type: beaker_500
+
+input_parameters:
+ speed:
+ type: integer
+ unit: rpm
+ value: 10
+ min: 1
+ max: 100
+ description: The speed at which the magnetic stirrer operates, measured in revolutions per minute (rpm).
+ time:
+ type: integer
+ unit: sec
+ value: 360
+ min: 3
+ max: 720
+ description: The total time duration for which the substances will be mixed, measured in seconds.
+
+output_parameters:
+ mixing_time:
+ type: integer
+ unit: sec
+ description: The total time duration for which the substances were mixed, measured in seconds.
\ No newline at end of file
diff --git a/tests/user/testing/tasks/multiplication_lab/compute_multiplication_objective/task.py b/tests/user/testing/tasks/multiplication_lab/compute_multiplication_objective/task.py
new file mode 100644
index 0000000..6dc8863
--- /dev/null
+++ b/tests/user/testing/tasks/multiplication_lab/compute_multiplication_objective/task.py
@@ -0,0 +1,20 @@
+from eos.tasks.base_task import BaseTask
+
+
+class ComputeMultiplicationObjectiveTask(BaseTask):
+ def _execute(
+ self,
+ devices: BaseTask.DevicesType,
+ parameters: BaseTask.ParametersType,
+ containers: BaseTask.ContainersType,
+ ) -> BaseTask.OutputType:
+ analyzer = devices.get_all_by_type("analyzer")[0]
+
+ number = parameters["number"]
+ product = parameters["product"]
+
+ objective = analyzer.analyze_result(number, product)
+
+ output_parameters = {"objective": objective}
+
+ return output_parameters, None, None
diff --git a/tests/user/testing/tasks/multiplication_lab/compute_multiplication_objective/task.yml b/tests/user/testing/tasks/multiplication_lab/compute_multiplication_objective/task.yml
new file mode 100644
index 0000000..032e186
--- /dev/null
+++ b/tests/user/testing/tasks/multiplication_lab/compute_multiplication_objective/task.yml
@@ -0,0 +1,21 @@
+type: Compute Multiplication Objective
+description: This task computes the objective for the optimize_multiplication experiment.
+
+device_types:
+ - analyzer
+
+input_parameters:
+ number:
+ type: integer
+ unit: none
+ description: The number to multiply.
+ product:
+ type: integer
+ unit: none
+ description: The final product.
+
+output_parameters:
+ objective:
+ type: integer
+ unit: none
+ description: The objective for the optimize_multiplication experiment.
diff --git a/tests/user/testing/tasks/multiplication_lab/multiplication/task.py b/tests/user/testing/tasks/multiplication_lab/multiplication/task.py
new file mode 100644
index 0000000..d40b1e3
--- /dev/null
+++ b/tests/user/testing/tasks/multiplication_lab/multiplication/task.py
@@ -0,0 +1,20 @@
+from eos.tasks.base_task import BaseTask
+
+
+class MultiplicationTask(BaseTask):
+ def _execute(
+ self,
+ devices: BaseTask.DevicesType,
+ parameters: BaseTask.ParametersType,
+ containers: BaseTask.ContainersType,
+ ) -> BaseTask.OutputType:
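+ # Use the first multiplier device allocated to this task.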
+ multiplier = devices.get_all_by_type("multiplier")[0]
+ number = parameters["number"]
+ factor = parameters["factor"]
+
+ product = multiplier.multiply(number, factor)
+
+ output_parameters = {"product": product}
+
+ return output_parameters, None, None
diff --git a/tests/user/testing/tasks/multiplication_lab/multiplication/task.yml b/tests/user/testing/tasks/multiplication_lab/multiplication/task.yml
new file mode 100644
index 0000000..163edd1
--- /dev/null
+++ b/tests/user/testing/tasks/multiplication_lab/multiplication/task.yml
@@ -0,0 +1,21 @@
+type: Multiplication
+description: This task takes a number and a factor and multiplies them together.
+
+device_types:
+ - multiplier
+
+input_parameters:
+ number:
+ type: integer
+ unit: none
+ description: The number to multiply.
+ factor:
+ type: integer
+ unit: none
+ description: The factor to multiply the number by.
+
+output_parameters:
+ product:
+ type: integer
+ unit: none
+ description: The product of the number and the factor.
diff --git a/tests/user/testing/tasks/noop/task.py b/tests/user/testing/tasks/noop/task.py
new file mode 100644
index 0000000..205eb0c
--- /dev/null
+++ b/tests/user/testing/tasks/noop/task.py
@@ -0,0 +1,11 @@
+from eos.tasks.base_task import BaseTask
+
+
+class NoopTask(BaseTask):
+ def _execute(
+ self,
+ devices: BaseTask.DevicesType,
+ parameters: BaseTask.ParametersType,
+ containers: BaseTask.ContainersType,
+ ) -> BaseTask.OutputType:
+ pass
diff --git a/tests/user/testing/tasks/noop/task.yml b/tests/user/testing/tasks/noop/task.yml
new file mode 100644
index 0000000..5c5bcc7
--- /dev/null
+++ b/tests/user/testing/tasks/noop/task.yml
@@ -0,0 +1,2 @@
+type: Noop
+description: This task does nothing.
diff --git a/tests/user/testing/tasks/purification/task.py b/tests/user/testing/tasks/purification/task.py
new file mode 100644
index 0000000..6518b8c
--- /dev/null
+++ b/tests/user/testing/tasks/purification/task.py
@@ -0,0 +1,14 @@
+from eos.tasks.base_task import BaseTask
+
+
+class PurificationTask(BaseTask):
+ def _execute(
+ self,
+ devices: BaseTask.DevicesType,
+ parameters: BaseTask.ParametersType,
+ containers: BaseTask.ContainersType,
+ ) -> BaseTask.OutputType:
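+ # Test stub: report a fixed salinity rather than simulating the purification.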
+ output_parameters = {"water_salinity": 0.02}
+
+ return output_parameters, None, None
diff --git a/tests/user/testing/tasks/purification/task.yml b/tests/user/testing/tasks/purification/task.yml
new file mode 100644
index 0000000..516fe29
--- /dev/null
+++ b/tests/user/testing/tasks/purification/task.yml
@@ -0,0 +1,72 @@
+type: Purification
+description: "This task aims to purify a single substance by separating it from its impurities. The device supports two methods: evaporation and simple mixing."
+
+device_types:
+ - evaporator
+
+input_containers:
+ beaker:
+ type: beaker_500
+
+input_parameters:
+ method:
+ type: choice
+ value: evaporation
+ choices:
+ - evaporation
+ - simple_mixing
+ description: The purification method to be used. Choose between evaporation and simple mixing.
+
+ # Evaporation parameters
+ evaporation_time:
+ type: integer
+ unit: sec
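+ # Objective: penalize deviation of the product from the target 1024 by a factor
+ # of 100, and break ties by preferring smaller starting numbers.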
+ value: 120
+ min: 60
+ description: Duration of evaporation in seconds.
+ evaporation_temperature:
+ type: integer
+ unit: celsius
+ value: 90
+ min: 30
+ max: 150
+ description: Evaporation temperature in degrees Celsius.
+ evaporation_rotation_speed:
+ type: integer
+ unit: rpm
+ value: 120
+ min: 10
+ max: 300
+ description: Speed of rotation in rpm.
+ evaporation_sparging:
+ type: boolean
+ value: true
+ description: Whether to use sparging gas during evaporation.
+ evaporation_sparging_flow:
+ type: integer
+ unit: ml/min
+ value: 5
+ min: 1
+ max: 10
+ description: Flow rate of sparging gas in ml/min.
+
+ # Simple mixing parameters
+ simple_mixing_time:
+ type: integer
+ unit: sec
+ value: 120
+ min: 60
+ description: Duration of simple mixing in seconds.
+ simple_mixing_rotation_speed:
+ type: integer
+ unit: rpm
+ value: 120
+ min: 10
+ max: 300
+ description: Speed of rotation in rpm.
+
+output_parameters:
+ water_salinity:
+ type: decimal
+ unit: ppm
+ description: The salinity of the purified water in parts per million.
diff --git a/tests/user/testing/tasks/robot_arm_container_transfer/task.py b/tests/user/testing/tasks/robot_arm_container_transfer/task.py
new file mode 100644
index 0000000..d1f04c3
--- /dev/null
+++ b/tests/user/testing/tasks/robot_arm_container_transfer/task.py
@@ -0,0 +1,11 @@
+from eos.tasks.base_task import BaseTask
+
+
+class RobotArmContainerTransferTask(BaseTask):
+ def _execute(
+ self,
+ devices: BaseTask.DevicesType,
+ parameters: BaseTask.ParametersType,
+ containers: BaseTask.ContainersType,
+ ) -> BaseTask.OutputType:
+ pass
diff --git a/tests/user/testing/tasks/robot_arm_container_transfer/task.yml b/tests/user/testing/tasks/robot_arm_container_transfer/task.yml
new file mode 100644
index 0000000..b28a49b
--- /dev/null
+++ b/tests/user/testing/tasks/robot_arm_container_transfer/task.yml
@@ -0,0 +1,22 @@
+type: Container Transfer
+description: Transfer a container from one location area to another using a robot arm.
+
+device_types:
+ - fixed_arm_robot
+
+input_parameters:
+ source_location:
+ type: string
+ description: The name of the source location.
+
+ source_location_area:
+ type: string
+ description: The name of the source location area.
+
+ target_location:
+ type: string
+ description: The name of the target location.
+
+ target_location_area:
+ type: string
+ description: The name of the target location area.
diff --git a/tests/user/testing/tasks/sleep/task.py b/tests/user/testing/tasks/sleep/task.py
new file mode 100644
index 0000000..da50d47
--- /dev/null
+++ b/tests/user/testing/tasks/sleep/task.py
@@ -0,0 +1,28 @@
+import time
+
+from eos.tasks.base_task import BaseTask
+
+
+class SleepTask(BaseTask):
+ def _execute(
+ self,
+ devices: BaseTask.DevicesType,
+ parameters: BaseTask.ParametersType,
+ containers: BaseTask.ContainersType,
+ ) -> BaseTask.OutputType:
+ self.cancel_requested = False
+
+ sleep_time = parameters["sleep_time"]
+ start_time = time.time()
+ elapsed = 0
+
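+ # Sleep in one-second slices so a cancellation request is honored within about
+ # a second rather than only after the full sleep time.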
+ while elapsed < sleep_time:
+ if self.cancel_requested:
+ self.cancel_requested = False
+ return None
+ time.sleep(1)
+ elapsed = time.time() - start_time
+
+ return None
diff --git a/tests/user/testing/tasks/sleep/task.yml b/tests/user/testing/tasks/sleep/task.yml
new file mode 100644
index 0000000..3a1d992
--- /dev/null
+++ b/tests/user/testing/tasks/sleep/task.yml
@@ -0,0 +1,10 @@
+type: Sleep
+description: This task sleeps for the specified amount of time.
+
+input_parameters:
+ sleep_time:
+ type: integer
+ unit: sec
+ value: 0
+ min: 0
+ description: The total time duration to sleep, in seconds.
diff --git a/tests/user/testing/tasks/wafer_sampling/task.py b/tests/user/testing/tasks/wafer_sampling/task.py
new file mode 100644
index 0000000..6c3b588
--- /dev/null
+++ b/tests/user/testing/tasks/wafer_sampling/task.py
@@ -0,0 +1,11 @@
+from eos.tasks.base_task import BaseTask
+
+
+class WaferSamplingTask(BaseTask):
+ def _execute(
+ self,
+ devices: BaseTask.DevicesType,
+ parameters: BaseTask.ParametersType,
+ containers: BaseTask.ContainersType,
+ ) -> BaseTask.OutputType:
+ pass
diff --git a/tests/user/testing/tasks/wafer_sampling/task.yml b/tests/user/testing/tasks/wafer_sampling/task.yml
new file mode 100644
index 0000000..119f365
--- /dev/null
+++ b/tests/user/testing/tasks/wafer_sampling/task.yml
@@ -0,0 +1,15 @@
+type: Wafer Sampling
+description: Perform wafer sampling with a cartesian robot and pump/valve system.
+
+device_types:
+ - cartesian_robot
+
+input_parameters:
+ wafer_spot:
+ type: list
+ element_type: integer
+ length: 2
+ min: [ -10, -10 ]
+ max: [ 10, 10 ]
+ value: [ 0, 0 ]
+ description: The coordinates of the wafer spot in the wafer station.
diff --git a/tests/user/testing/tasks/weigh_container/task.py b/tests/user/testing/tasks/weigh_container/task.py
new file mode 100644
index 0000000..eb4c93b
--- /dev/null
+++ b/tests/user/testing/tasks/weigh_container/task.py
@@ -0,0 +1,11 @@
+from eos.tasks.base_task import BaseTask
+
+
+class WeighContainerTask(BaseTask):
+ def _execute(
+ self,
+ devices: BaseTask.DevicesType,
+ parameters: BaseTask.ParametersType,
+ containers: BaseTask.ContainersType,
+ ) -> BaseTask.OutputType:
+ pass
diff --git a/tests/user/testing/tasks/weigh_container/task.yml b/tests/user/testing/tasks/weigh_container/task.yml
new file mode 100644
index 0000000..a65ffec
--- /dev/null
+++ b/tests/user/testing/tasks/weigh_container/task.yml
@@ -0,0 +1,19 @@
+type: Weigh Container
+description: This task involves using an analytical balance to accurately measure the mass of a container.
+
+device_types:
+ - balance
+
+input_parameters:
+ minimum_weight:
+ type: decimal
+ unit: g
+ value: 0.1
+ min: 0.0001
+ description: The minimum weight required for the measurement to be considered valid.
+
+output_parameters:
+ weight:
+ type: decimal
+ unit: g
+ description: The measured weight of the container.
\ No newline at end of file
diff --git a/user/.gitkeep b/user/.gitkeep
new file mode 100644
index 0000000..e69de29