diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 4e60d89a..0771f050 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -74,7 +74,7 @@ // FIXME: This assumes that either the NETWORK_DIR environment variable is set on the host, or // that the /network directory exists. "source=${localEnv:NETWORK_DIR:/network},target=/network,type=bind,readonly", - // Mount a /tmp on the host machine to /tmp/slurm_tmpdir in the container. + // Mount $SLURM_TMPDIR on the host machine to /tmp/slurm_tmpdir in the container. // note: there's also a SLURM_TMPDIR env variable set to /tmp/slurm_tmpdir in the container. // NOTE: this assumes that either $SLURM_TMPDIR is set on the host machine (e.g. a compute node) // or that `/tmp/slurm_tmpdir` exists on the host machine. @@ -89,7 +89,7 @@ // doesn't fail. "initializeCommand": { "create pdm install cache": "mkdir -p ${SCRATCH?need the SCRATCH environment variable to be set.}/.cache/pdm", // todo: put this on $SCRATCH on the host (e.g. compute node) - "create fake SLURM_TMPDIR": "mkdir -p ${SLURM_TMPDIR?need the SLURM_TMPDIR environment variable to be set.}" // this is fine on compute nodes + "create fake SLURM_TMPDIR": "mkdir -p ${SLURM_TMPDIR:-/tmp/slurm_tmpdir}" // this is fine on compute nodes }, // NOTE: Getting some permission issues with the .cache dir if mounting .cache/pdm to // .cache/pdm in the container. 
Therefore, here I'm making a symlink from ~/.cache/pdm to diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml new file mode 100644 index 00000000..4cacfefa --- /dev/null +++ b/.github/workflows/docs.yml @@ -0,0 +1,19 @@ +name: Publish docs via GitHub Pages +on: + push: + branches: + - master + +jobs: + build: + name: Deploy docs + runs-on: ubuntu-latest + steps: + - name: Checkout main + uses: actions/checkout@v4 + + - name: Deploy docs + uses: mhausenblas/mkdocs-deploy-gh-pages@1.9 + # Or use mhausenblas/mkdocs-deploy-gh-pages@nomaterial to build without the mkdocs-material theme + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 09fc73eb..03f67b11 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -72,16 +72,20 @@ repos: # md formatting - repo: https://github.com/executablebooks/mdformat - rev: 0.7.16 + rev: 0.7.17 hooks: - id: mdformat + exclude: 'SUMMARY.md' args: ["--number"] additional_dependencies: - mdformat-gfm - mdformat-tables - mdformat_frontmatter - # - mdformat-toc - # - mdformat-black + - mdformat-toc + - mdformat-config + - mdformat-black + # see https://github.com/KyleKing/mdformat-mkdocs + - mdformat-mkdocs[recommended]>=2.1.0 require_serial: true diff --git a/README.md b/README.md index a71fa43b..41e50c02 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,8 @@ -# research_template +# Research Project Template ![Build](https://github.com/mila-iqia/ResearchTemplate/workflows/build.yml/badge.svg) [![codecov](https://codecov.io/gh/mila-iqia/ResearchTemplate/graph/badge.svg?token=I2DYLK8NTD)](https://codecov.io/gh/mila-iqia/ResearchTemplate) + +Please note: This is a **Work-in-Progress**. The goal is to make a first release by the end of summer 2024. + +For now, feel free to take a look at the [documentation page](https://mila-iqia.github.io/ResearchTemplate/) if you want more information about this project. 
diff --git a/docs/.gitignore b/docs/.gitignore new file mode 100644 index 00000000..914698f4 --- /dev/null +++ b/docs/.gitignore @@ -0,0 +1,2 @@ +reference.md +reference/* diff --git a/docs/SUMMARY.md b/docs/SUMMARY.md new file mode 100644 index 00000000..716ea5fd --- /dev/null +++ b/docs/SUMMARY.md @@ -0,0 +1,13 @@ +* [Home](index.md) + * Overview + * overview/*.md + * Getting Started + * getting_started/*.md + * Reference + * reference/* + * Examples + * examples/* +* [Tests](tests.md) +* [Related projects](related.md) +* [Getting Help](help.md) +* [Contributing](contributing.md) diff --git a/docs/contributing.md b/docs/contributing.md new file mode 100644 index 00000000..8000bcab --- /dev/null +++ b/docs/contributing.md @@ -0,0 +1,5 @@ +# Contributing + +TODOs: + +- [ ] Describe how to contribute to the project. diff --git a/docs/examples/examples.md b/docs/examples/examples.md new file mode 100644 index 00000000..1258e5cb --- /dev/null +++ b/docs/examples/examples.md @@ -0,0 +1,22 @@ +# Examples + +TODOs: + +- [ ] Show examples (that are also to be tested with doctest or similar) of how to add a new algo. +- [ ] Show examples of how to add a new datamodule. +- [ ] Add a link to the RL example once [#13](https://github.com/mila-iqia/ResearchTemplate/issues/13) is done. +- [ ] Add a link to the NLP example once [#14](https://github.com/mila-iqia/ResearchTemplate/issues/14) is done. 
+- [ ] Add an example of how to use Jax for the dataset/dataloading: + - Either through an RL example, or with `tfds` in [#18](https://github.com/mila-iqia/ResearchTemplate/issues/18) + +## Simple run + +```bash +python project/main.py algorithm=example_algo datamodule=mnist network=fcnet +``` + +## Running a Hyper-Parameter sweep on a SLURM cluster + +```bash +python project/main.py experiment=cluster_sweep_example +``` diff --git a/docs/examples/jax.md b/docs/examples/jax.md new file mode 100644 index 00000000..607e975c --- /dev/null +++ b/docs/examples/jax.md @@ -0,0 +1,18 @@ +# Using Jax + +You can use Jax for your dataloading, your network, or the learning algorithm, all while still benefiting from the nice stuff that comes from using PyTorch-Lightning. + +How does this work? +Well, we use [torch-jax-interop](https://www.github.com/lebrice/torch_jax_interop), another package developed here at Mila, which allows easy interop between torch and jax code. See the readme on that repo for more details. + +## Example Algorithm that uses Jax + +You can use Jax for your training step, but not the entire training loop (since that is handled by Lightning). +There are a few good reasons why you should let Lightning handle the training loop, most notably the fact that it handles all the logging, checkpointing, and other stuff that you'd lose if you swapped out the entire training framework for something based on Jax. + +In this [example Jax algorithm](https://www.github.com/mila-iqia/ResearchTemplate/tree/master/project/algorithms/jax_algo.py), +a Neural network written in Jax (using [flax](https://flax.readthedocs.io/en/latest/)) is wrapped using the `torch_jax_interop.JaxFunction`, so that its parameters are learnable. The parameters are saved on the LightningModule as nn.Parameters (which use the same underlying memory as the jax arrays). In this example, the loss function is written in PyTorch, while the network forward and backward passes are written in Jax. 
+ +## Example datamodule that uses Jax + +(todo) diff --git a/docs/generate_reference_docs.py b/docs/generate_reference_docs.py new file mode 100644 index 00000000..ae445acb --- /dev/null +++ b/docs/generate_reference_docs.py @@ -0,0 +1,128 @@ +#!/usr/bin/env python +# based on https://github.com/mkdocstrings/mkdocstrings/blob/5802b1ef5ad9bf6077974f777bd55f32ce2bc219/docs/gen_doc_stubs.py#L25 + + +import textwrap +from pathlib import Path + +import mkdocs_gen_files +import mkdocs_gen_files.nav + +from project.utils.env_vars import REPO_ROOTDIR + +nav = mkdocs_gen_files.nav.Nav() + + +package = "project" +module = "project/main.py" +submodules = ["project.datamodules", "project.utils", "project.networks", "project.algorithms"] + + +def _get_import_path(module_path: Path) -> str: + """Returns the path to use to import a given (internal) module.""" + return ".".join(module_path.relative_to(REPO_ROOTDIR).with_suffix("").parts) + + +def add_doc_for_module(module_path: Path) -> None: + a = "reference" / (module_path.relative_to(REPO_ROOTDIR).with_suffix(".md")) + module_import_path = _get_import_path(module_path) + + with mkdocs_gen_files.open(a, "w") as f: + print( + textwrap.dedent(f"""\ + ::: {module_import_path} + """), + file=f, + ) + docs_dir = REPO_ROOTDIR / "docs" + module_path_relative_to_docs_dir = module_path.relative_to(docs_dir, walk_up=True) + mkdocs_gen_files.set_edit_path(a, module_path_relative_to_docs_dir) + + +def get_modules(package: Path) -> list[Path]: + return [ + p + for p in package.glob("*.py") + if not p.name.endswith("_test.py") and not p.name == "__init__.py" + ] + + +def get_subpackages(package: Path) -> list[Path]: + return [ + p + for p in package.iterdir() + if p.is_dir() and not p.name.startswith("__") and (p / "__init__.py").exists() + ] + + +project_nav = mkdocs_gen_files.nav.Nav() +with mkdocs_gen_files.open("reference/project/main.md", "w") as f: + print( + textwrap.dedent("""\ + ::: project.main + """), + file=f, + ) +nav["project", 
"main"] = "project/main.md" +mkdocs_gen_files.set_edit_path("reference/project/main.md", "../project/main.py") + +with mkdocs_gen_files.open("reference/project/experiment.md", "w") as f: + print( + textwrap.dedent("""\ + ::: project.experiment + """), + file=f, + ) +nav["project", "experiment"] = "reference/project/experiment.md" +mkdocs_gen_files.set_edit_path("reference/project/experiment.md", "../project/experiment.py") + +project_utils_nav = mkdocs_gen_files.nav.Nav() +with mkdocs_gen_files.open("reference/project/utils/types.md", "w") as f: + print( + textwrap.dedent("""\ + ::: project.utils.types + options: + show_source: true + """), + file=f, + ) +nav["project", "utils", "types"] = "reference/project/utils/types.md" +mkdocs_gen_files.set_edit_path("reference/project/utils/types.md", "../project/utils/types.py") + + +with mkdocs_gen_files.open("reference.md", "w") as nav_file: + # assert False, "\n".join(nav.build_literate_nav()) + nav_file.writelines(nav.build_literate_nav()) + +# with mkdocs_gen_files.open("reference/SUMMARY.md", "w") as project_nav_file: +# project_nav_file.writelines(project_nav.build_literate_nav()) + + +# project_root = REPO_ROOTDIR / "project" +# for python_module_path in sorted( +# f +# # for f in project_root.glob("*.py") +# for f in [(project_root / "project")] +# if not f.name.endswith("_test.py") and not f.name == "__init__.py" +# ): +# doc_path = python_module_path.relative_to(REPO_ROOTDIR).with_suffix(".md") + +# full_doc_path = Path("reference") / doc_path + +# nav[full_doc_path.with_suffix("").parts] = str(full_doc_path) + +# with mkdocs_gen_files.open(full_doc_path, "w") as f: +# module_import_path = ".".join( +# python_module_path.relative_to(REPO_ROOTDIR).with_suffix("").parts +# ) +# print(f"::: {module_import_path}", file=f) + +# mkdocs_gen_files.set_edit_path( +# full_doc_path, python_module_path.relative_to(REPO_ROOTDIR / "docs", walk_up=True) +# ) + +# nav["mkdocs_autorefs", "references"] = "autorefs/references.md" +# 
nav["mkdocs_autorefs", "plugin"] = "autorefs/plugin.md" + +# with mkdocs_gen_files.open("reference.md", "w") as nav_file: +# nav_file.writelines(nav.build_literate_nav()) diff --git a/docs/getting_started/install.md b/docs/getting_started/install.md new file mode 100644 index 00000000..2384c9ff --- /dev/null +++ b/docs/getting_started/install.md @@ -0,0 +1,73 @@ +# Installation instructions + +There are two ways to install this project: + +1. Using Conda (recommended for newcomers) +2. Using a development container (recommended if you are able to install Docker on your machine) + +## Using Conda and pip + +### Prerequisites + +You need to have [Conda](https://docs.conda.io/en/latest/) installed on your machine. + +### Installation + +1. Clone the repository and navigate to the root directory: + + ```bash + git clone https://www.github.com/mila-iqia/ResearchTemplate + cd ResearchTemplate + ``` + +2. Create a conda environment + + ```bash + conda create -n research_template python=3.12 + conda activate research_template + ``` + + Notes: + + - If you don't have Conda installed, you can download it from [here](https://docs.conda.io/en/latest/miniconda.html). + - If you'd rather use a virtual environment instead of Conda, you can totally do so, as long as you have a version of Python >= 3.12. + + + +3. Install the package using pip: + + ```bash + pip install -e . + ``` + + Optionally, you can also install the package using [PDM](https://pdm-project.org/en/latest/). This makes it easier to add or change the dependencies later on: + + ```bash + pip install pdm + pdm install + ``` + +## Using a development container + +This repo provides a [Devcontainer](https://code.visualstudio.com/docs/remote/containers) configuration for [Visual Studio Code](https://code.visualstudio.com/) to use a Docker container as a pre-configured development environment. This avoids struggles setting up a development environment and makes them reproducible and consistent. 
and make yourself familiar with the [container tutorials](https://code.visualstudio.com/docs/remote/containers-tutorial) if you want to use them. In order to use GPUs, you can enable them within the `.devcontainer/devcontainer.json` file. + +1. Setup Docker on your local machine + + On a Linux machine where you have root access, you can install Docker using the following commands: + + ```bash + curl -fsSL https://get.docker.com -o get-docker.sh + sudo sh get-docker.sh + ``` + + On Windows or Mac, follow [these installation instructions](https://code.visualstudio.com/docs/remote/containers#_installation) + +2. (optional) Install the [nvidia-container-toolkit](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/install-guide.html) to use your local machine's GPU(s). + +3. Install the [Dev Containers extension](vscode:extension/ms-vscode-remote.remote-containers) for Visual Studio Code. + +4. When opening the repository in Visual Studio Code, you should be prompted to reopen the repository in a container: + + ![VsCode popup image](https://github.com/mila-iqia/ResearchTemplate/assets/13387299/37d00ce7-1214-44b2-b1d6-411ee286999f) + + Alternatively, you can open the command palette (Ctrl+Shift+P) and select `Dev Containers: Rebuild and Reopen in Container`. 
diff --git a/docs/help.md b/docs/help.md new file mode 100644 index 00000000..21adfa1e --- /dev/null +++ b/docs/help.md @@ -0,0 +1,5 @@ +# Help and Support + +## FAQ + +## How to get help diff --git a/docs/index.md b/docs/index.md new file mode 100644 index 00000000..ceeccb61 --- /dev/null +++ b/docs/index.md @@ -0,0 +1,51 @@ +# Research Project Template + +[![Build](https://github.com/mila-iqia/ResearchTemplate/actions/workflows/build.yml/badge.svg?branch=master)](https://github.com/mila-iqia/ResearchTemplate/actions/workflows/build.yml) +[![codecov](https://codecov.io/gh/mila-iqia/ResearchTemplate/graph/badge.svg?token=I2DYLK8NTD)](https://codecov.io/gh/mila-iqia/ResearchTemplate) +[![hydra](https://img.shields.io/badge/Config-Hydra_1.3-89b8cd)](https://hydra.cc/) +[![license](https://img.shields.io/badge/License-MIT-green.svg?labelColor=gray)](https://github.com/mila-iqia/ResearchTemplate#license) + +Please note: This is a Work-in-Progress. The goal is to make a first release by the end of summer 2024. + +This is a research project template. It is meant to be a starting point for ML researchers at [Mila](https://mila.quebec/en). + +For more context, see [this introduction to the project](overview/intro.md). + +## Overview + +This project makes use of the following libraries: + +- [Hydra](https://hydra.cc/) is used to configure the project. It allows you to define configuration files and override them from the command line. +- [PyTorch Lightning](https://lightning.ai/docs/pytorch/stable/) is used as the training framework. It provides a high-level interface to organize ML research code. + - 🔥 Please note: You can also use [Jax](https://jax.readthedocs.io/en/latest/) with this repo, as is shown in the [Jax example](examples/examples.md#using-jax) 🔥 +- [Weights & Biases](https://wandb.ai) is used to log metrics and visualize results. +- [pytest](https://docs.pytest.org/en/stable/) is used for testing. 
+ +## Usage + +To see all available options: + +```bash +python project/main.py --help +``` + +For a detailed list of examples, see the [examples page](examples/examples.md). + + + +## Project layout + +``` +pyproject.toml # Project metadata and dependencies +project/ + main.py # main entry-point + algorithms/ # learning algorithms + datamodules/ # datasets, processing and loading + networks/ # Neural networks used by algorithms + configs/ # configuration files +docs/ # documentation +conftest.py # Test fixtures and utilities +``` diff --git a/docs/overview/intro.md b/docs/overview/intro.md new file mode 100644 index 00000000..ee707a2b --- /dev/null +++ b/docs/overview/intro.md @@ -0,0 +1,36 @@ +# Introduction + +## Why should you use this template? + +### Why should you use *a* template in the first place? + +For many good reasons, which are very well described [here in a similar project](https://cookiecutter-data-science.drivendata.org/why/)! ๐Ÿ˜Š + +Other good reads: + +- [https://cookiecutter-data-science.drivendata.org/why/](https://cookiecutter-data-science.drivendata.org/why/) +- [https://cookiecutter-data-science.drivendata.org/opinions/](https://cookiecutter-data-science.drivendata.org/opinions/) +- [https://12factor.net/](https://12factor.net/) +- [https://github.com/ashleve/lightning-hydra-template/tree/main?tab=readme-ov-file#main-ideas](https://github.com/ashleve/lightning-hydra-template/tree/main?tab=readme-ov-file#main-ideas) + +### Why should you use *this* template (instead of another)? + +You are welcome (and encouraged) to use other similar templates which, at the time of writing this, have significantly better documentation. 
However, there are several advantages to using this particular template: + +- ❗Support for both Jax and Torch with PyTorch-Lightning ❗ +- Easy development inside a devcontainer with VsCode +- Tailor-made for ML researchers that run their jobs on SLURM clusters (with default configurations for the [Mila](https://docs.mila.quebec) and [DRAC](https://docs.alliancecan.ca) clusters.) +- Rich typing of all parts of the source code using Python 3.12's new type annotation syntax +- A comprehensive suite of automated tests for new algorithms, datasets and networks +- Automatically creates Yaml Schemas for your Hydra config files (as soon as #7 is merged) + +This template is aimed at ML researchers that run their jobs on SLURM clusters. +The target audience is researchers and students at [Mila](https://mila.quebec). This template should still be useful for others outside of Mila that use PyTorch-Lightning and Hydra. + +## Main concepts + +### Datamodule + +### Network + +### Algorithm diff --git a/docs/related.md b/docs/related.md new file mode 100644 index 00000000..1cb08835 --- /dev/null +++ b/docs/related.md @@ -0,0 +1,21 @@ +# Related projects and resources + +There are other very similar projects with significantly better documentation. In all cases that involve Hydra and PyTorch-Lightning, this documentation also applies directly to this project, so in order to avoid copying their documentation, here are some links: + +- [lightning-hydra-template](https://github.com/ashleve/lightning-hydra-template) + + - How it works: https://github.com/gorodnitskiy/yet-another-lightning-hydra-template/tree/main?tab=readme-ov-file#workflow---how-it-works + +- [yet-another-lightning-hydra-template](https://github.com/gorodnitskiy/yet-another-lightning-hydra-template) + + - Excellent template, based on the lightning-hydra-template. Great documentation, which is referenced extensively in this project. 
+ - - Has a **great** Readme with lots of information + - - Is really well organized + - - doesn't support Jax + - - doesn't have a devcontainer + - Great blog: https://hackernoon.com/yet-another-lightning-hydra-template-for-ml-experiments + +- [cookiecutter-data-science](https://github.com/drivendataorg/cookiecutter-data-science) + + - Awesome library for data science. + - Related projects: https://github.com/drivendataorg/cookiecutter-data-science/blob/master/docs/docs/related.md#links-to-related-projects-and-references diff --git a/docs/tests.md b/docs/tests.md new file mode 100644 index 00000000..ccc911a2 --- /dev/null +++ b/docs/tests.md @@ -0,0 +1,10 @@ +# Tests + +TODOs: + +- [ ] Described what is tested by the included automated tests (a bit like what is done [here](https://github.com/gorodnitskiy/yet-another-lightning-hydra-template?tab=readme-ov-file#tests)) +- [ ] Add some examples of how to run tests +- [ ] describe why the test files are next to the source files, and why TDD is good, and why ML researchers should care more about tests. +- [ ] Explain how the fixtures in `conftest.py` work (indirect parametrization of the command-line overrides, etc). +- [ ] Describe the Github Actions workflows that come with the template, and how to setup a self-hosted runner for template forks. +- [ ] Add links to relevant documentation () diff --git a/mkdocs.yml b/mkdocs.yml new file mode 100644 index 00000000..346c9b0b --- /dev/null +++ b/mkdocs.yml @@ -0,0 +1,54 @@ +site_name: Research Project Template (wip) +site_description: A project template and directory structure for Python data science projects. 
(Work-in-Progress) +site_url: https://mila-iqia.github.io/ResearchTemplate/ +repo_url: https://www.github.com/mila-iqia/ResearchTemplate +# edit_uri: edit/master/docs + +theme: material +markdown_extensions: + - toc: + permalink: "#" + toc_depth: 3 + - pymdownx.highlight + - pymdownx.magiclink + - pymdownx.superfences + +plugins: +- search +- literate-nav: + nav_file: SUMMARY.md +- awesome-pages +- gen-files: + # https://oprypin.github.io/mkdocs-gen-files/#usage + scripts: + - docs/generate_reference_docs.py +- mkdocstrings: + handlers: + python: + import: + - https://docs.python-requests.org/en/master/objects.inv + options: + docstring_style: google + members_order: source + annotations_path: brief + show_docstring_attributes: true + modernize_annotations: true + show_source: false + show_submodules: false + separate_signature: true + signature_crossrefs: true + show_signature_annotations: true + allow_inspection: true + +# todo: take a look at https://github.com/drivendataorg/cookiecutter-data-science/blob/master/docs/mkdocs.yml +# - admonition +# - pymdownx.details +# - pymdownx.superfences +# - pymdownx.tabbed: +# alternate_style: true +# slugify: !!python/object/apply:pymdownx.slugs.slugify +# kwds: +# case: lower +# - tables +# - toc: +# toc_depth: 2 diff --git a/pdm.lock b/pdm.lock index 1eb350cd..c1c62298 100644 --- a/pdm.lock +++ b/pdm.lock @@ -5,7 +5,7 @@ groups = ["default", "dev"] strategy = ["cross_platform", "inherit_metadata"] lock_version = "4.4.1" -content_hash = "sha256:2e4f9911bfebcfc3a32ec5f7c9257db49d5c51fde8087009c4b966873563c79c" +content_hash = "sha256:9e1397c47b842e910d32a691bc3082a1bf8b4224e82c49e442d43c4f44e66e12" [[package]] name = "absl-py" @@ -75,6 +75,17 @@ files = [ {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, ] +[[package]] +name = "ansicon" +version = "1.89.0" +summary = "Python wrapper for loading Jason Hood's ANSICON" +groups = ["default"] +marker = 
"platform_system == \"Windows\"" +files = [ + {file = "ansicon-1.89.0-py2.py3-none-any.whl", hash = "sha256:f1def52d17f65c2c9682cf8370c03f541f410c1752d6a14029f97318e4b9dfec"}, + {file = "ansicon-1.89.0.tar.gz", hash = "sha256:e4d039def5768a47e4afec8e89e83ec3ae5a26bf00ad851f914d1240b444d2b1"}, +] + [[package]] name = "antlr4-python3-runtime" version = "4.9.3" @@ -84,6 +95,19 @@ files = [ {file = "antlr4-python3-runtime-4.9.3.tar.gz", hash = "sha256:f224469b4168294902bb1efa80a8bf7855f24c99aef99cbefc1bcd3cce77881b"}, ] +[[package]] +name = "asttokens" +version = "2.4.1" +summary = "Annotate AST trees with source code positions" +groups = ["default"] +dependencies = [ + "six>=1.12.0", +] +files = [ + {file = "asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24"}, + {file = "asttokens-2.4.1.tar.gz", hash = "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0"}, +] + [[package]] name = "attrs" version = "23.2.0" @@ -95,6 +119,53 @@ files = [ {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, ] +[[package]] +name = "babel" +version = "2.15.0" +requires_python = ">=3.8" +summary = "Internationalization utilities" +groups = ["dev"] +files = [ + {file = "Babel-2.15.0-py3-none-any.whl", hash = "sha256:08706bdad8d0a3413266ab61bd6c34d0c28d6e1e7badf40a2cebe67644e2e1fb"}, + {file = "babel-2.15.0.tar.gz", hash = "sha256:8daf0e265d05768bc6c7a314cf1321e9a123afc328cc635c18622a2f30a04413"}, +] + +[[package]] +name = "bcrypt" +version = "4.1.3" +requires_python = ">=3.7" +summary = "Modern password hashing for your software and your servers" +groups = ["default"] +files = [ + {file = "bcrypt-4.1.3-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:48429c83292b57bf4af6ab75809f8f4daf52aa5d480632e53707805cc1ce9b74"}, + {file = "bcrypt-4.1.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:4a8bea4c152b91fd8319fef4c6a790da5c07840421c2b785084989bf8bbb7455"}, + {file = "bcrypt-4.1.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d3b317050a9a711a5c7214bf04e28333cf528e0ed0ec9a4e55ba628d0f07c1a"}, + {file = "bcrypt-4.1.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:094fd31e08c2b102a14880ee5b3d09913ecf334cd604af27e1013c76831f7b05"}, + {file = "bcrypt-4.1.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:4fb253d65da30d9269e0a6f4b0de32bd657a0208a6f4e43d3e645774fb5457f3"}, + {file = "bcrypt-4.1.3-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:193bb49eeeb9c1e2db9ba65d09dc6384edd5608d9d672b4125e9320af9153a15"}, + {file = "bcrypt-4.1.3-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:8cbb119267068c2581ae38790e0d1fbae65d0725247a930fc9900c285d95725d"}, + {file = "bcrypt-4.1.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:6cac78a8d42f9d120b3987f82252bdbeb7e6e900a5e1ba37f6be6fe4e3848286"}, + {file = "bcrypt-4.1.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:01746eb2c4299dd0ae1670234bf77704f581dd72cc180f444bfe74eb80495b64"}, + {file = "bcrypt-4.1.3-cp37-abi3-win32.whl", hash = "sha256:037c5bf7c196a63dcce75545c8874610c600809d5d82c305dd327cd4969995bf"}, + {file = "bcrypt-4.1.3-cp37-abi3-win_amd64.whl", hash = "sha256:8a893d192dfb7c8e883c4576813bf18bb9d59e2cfd88b68b725990f033f1b978"}, + {file = "bcrypt-4.1.3-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:0d4cf6ef1525f79255ef048b3489602868c47aea61f375377f0d00514fe4a78c"}, + {file = "bcrypt-4.1.3-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f5698ce5292a4e4b9e5861f7e53b1d89242ad39d54c3da451a93cac17b61921a"}, + {file = "bcrypt-4.1.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec3c2e1ca3e5c4b9edb94290b356d082b721f3f50758bce7cce11d8a7c89ce84"}, + {file = "bcrypt-4.1.3-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:3a5be252fef513363fe281bafc596c31b552cf81d04c5085bc5dac29670faa08"}, 
+ {file = "bcrypt-4.1.3-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:5f7cd3399fbc4ec290378b541b0cf3d4398e4737a65d0f938c7c0f9d5e686611"}, + {file = "bcrypt-4.1.3-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:c4c8d9b3e97209dd7111bf726e79f638ad9224b4691d1c7cfefa571a09b1b2d6"}, + {file = "bcrypt-4.1.3-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:31adb9cbb8737a581a843e13df22ffb7c84638342de3708a98d5c986770f2834"}, + {file = "bcrypt-4.1.3-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:551b320396e1d05e49cc18dd77d970accd52b322441628aca04801bbd1d52a73"}, + {file = "bcrypt-4.1.3-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6717543d2c110a155e6821ce5670c1f512f602eabb77dba95717ca76af79867d"}, + {file = "bcrypt-4.1.3-cp39-abi3-win32.whl", hash = "sha256:6004f5229b50f8493c49232b8e75726b568535fd300e5039e255d919fc3a07f2"}, + {file = "bcrypt-4.1.3-cp39-abi3-win_amd64.whl", hash = "sha256:2505b54afb074627111b5a8dc9b6ae69d0f01fea65c2fcaea403448c503d3991"}, + {file = "bcrypt-4.1.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:cb9c707c10bddaf9e5ba7cdb769f3e889e60b7d4fea22834b261f51ca2b89fed"}, + {file = "bcrypt-4.1.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9f8ea645eb94fb6e7bea0cf4ba121c07a3a182ac52876493870033141aa687bc"}, + {file = "bcrypt-4.1.3-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:f44a97780677e7ac0ca393bd7982b19dbbd8d7228c1afe10b128fd9550eef5f1"}, + {file = "bcrypt-4.1.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d84702adb8f2798d813b17d8187d27076cca3cd52fe3686bb07a9083930ce650"}, + {file = "bcrypt-4.1.3.tar.gz", hash = "sha256:2ee15dd749f5952fe3f0430d0ff6b74082e159c50332a1413d51b5689cf06623"}, +] + [[package]] name = "beautifulsoup4" version = "4.12.3" @@ -109,6 +180,44 @@ files = [ {file = "beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"}, ] +[[package]] +name = "black" +version = "24.4.2" +requires_python = 
">=3.8" +summary = "The uncompromising code formatter." +groups = ["dev"] +dependencies = [ + "click>=8.0.0", + "mypy-extensions>=0.4.3", + "packaging>=22.0", + "pathspec>=0.9.0", + "platformdirs>=2", +] +files = [ + {file = "black-24.4.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:accf49e151c8ed2c0cdc528691838afd217c50412534e876a19270fea1e28e2d"}, + {file = "black-24.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:88c57dc656038f1ab9f92b3eb5335ee9b021412feaa46330d5eba4e51fe49b04"}, + {file = "black-24.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be8bef99eb46d5021bf053114442914baeb3649a89dc5f3a555c88737e5e98fc"}, + {file = "black-24.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:415e686e87dbbe6f4cd5ef0fbf764af7b89f9057b97c908742b6008cc554b9c0"}, + {file = "black-24.4.2-py3-none-any.whl", hash = "sha256:d36ed1124bb81b32f8614555b34cc4259c3fbc7eec17870e8ff8ded335b58d8c"}, + {file = "black-24.4.2.tar.gz", hash = "sha256:c872b53057f000085da66a19c55d68f6f8ddcac2642392ad3a355878406fbd4d"}, +] + +[[package]] +name = "blessed" +version = "1.20.0" +requires_python = ">=2.7" +summary = "Easy, practical library for making terminal apps, by providing an elegant, well-documented interface to Colors, Keyboard input, and screen Positioning capabilities." 
+groups = ["default"] +dependencies = [ + "jinxed>=1.1.0; platform_system == \"Windows\"", + "six>=1.9.0", + "wcwidth>=0.1.4", +] +files = [ + {file = "blessed-1.20.0-py2.py3-none-any.whl", hash = "sha256:0c542922586a265e699188e52d5f5ac5ec0dd517e5a1041d90d2bbf23f906058"}, + {file = "blessed-1.20.0.tar.gz", hash = "sha256:2cdd67f8746e048f00df47a2880f4d6acbcdb399031b604e34ba8f71d5787680"}, +] + [[package]] name = "blinker" version = "1.8.2" @@ -120,6 +229,17 @@ files = [ {file = "blinker-1.8.2.tar.gz", hash = "sha256:8f77b09d3bf7c795e969e9486f39c2c5e9c39d4ee07424be2bc594ece9642d83"}, ] +[[package]] +name = "bracex" +version = "2.4" +requires_python = ">=3.8" +summary = "Bash style brace expander." +groups = ["dev"] +files = [ + {file = "bracex-2.4-py3-none-any.whl", hash = "sha256:efdc71eff95eaff5e0f8cfebe7d01adf2c8637c8c92edaf63ef348c241a82418"}, + {file = "bracex-2.4.tar.gz", hash = "sha256:a27eaf1df42cf561fed58b7a8f3fdf129d1ea16a81e1fadd1d17989bc6384beb"}, +] + [[package]] name = "brax" version = "0.10.5" @@ -161,18 +281,41 @@ name = "certifi" version = "2024.6.2" requires_python = ">=3.6" summary = "Python package for providing Mozilla's CA Bundle." -groups = ["default"] +groups = ["default", "dev"] files = [ {file = "certifi-2024.6.2-py3-none-any.whl", hash = "sha256:ddc6c8ce995e6987e7faf5e3f1b02b302836a0e5d98ece18392cb1a36c72ad56"}, {file = "certifi-2024.6.2.tar.gz", hash = "sha256:3cd43f1c6fa7dedc5899d69d3ad0398fd018ad1a17fba83ddaf78aa46c747516"}, ] +[[package]] +name = "cffi" +version = "1.16.0" +requires_python = ">=3.8" +summary = "Foreign Function Interface for Python calling C code." 
+groups = ["default"] +dependencies = [ + "pycparser", +] +files = [ + {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, + {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, + {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, + {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, + {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, +] + [[package]] name = "charset-normalizer" version = "3.3.2" requires_python = ">=3.7.0" summary = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
-groups = ["default"] +groups = ["default", "dev"] files = [ {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, @@ -218,7 +361,7 @@ name = "click" version = "8.1.7" requires_python = ">=3.7" summary = "Composable command line interface toolkit" -groups = ["default"] +groups = ["default", "dev"] dependencies = [ "colorama; platform_system == \"Windows\"", ] @@ -238,13 +381,38 @@ files = [ {file = "cloudpickle-3.0.0.tar.gz", hash = "sha256:996d9a482c6fb4f33c1a35335cf8afd065d2a56e973270364840712d9131a882"}, ] +[[package]] +name = "codefind" +version = "0.1.6" +requires_python = "<4.0,>=3.8" +summary = "Find code objects and their referents" +groups = ["default"] +marker = "python_version ~= \"3.8\"" +files = [ + {file = "codefind-0.1.6-py3-none-any.whl", hash = "sha256:0a3b8a441d881a4ba81bede611c11deb56c920a6fcee038dc4bd6e3e6869c67b"}, + {file = "codefind-0.1.6.tar.gz", hash = "sha256:2447fb7c09a57369e131f8087be1640304500d960f742e65c7d9b07ed00fff19"}, +] + +[[package]] +name = "coleo" +version = "0.3.3" +requires_python = ">=3.7,<4.0" +summary = "The nicest way to develop a command-line interface" +groups = ["default"] +dependencies = [ + "ptera<2.0.0,>=1.4.1", +] +files = [ + {file = "coleo-0.3.3-py3-none-any.whl", hash = "sha256:002da5966836c59e1d69a19f93759920043db40f612c8ffe3f87231379bd1b03"}, + {file = "coleo-0.3.3.tar.gz", hash = "sha256:33d93991ad205cf1eebfbb5c1df34c0aa8eceda89b59d0970c4850dbbe53914a"}, +] + [[package]] name = "colorama" version = "0.4.6" requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" summary = "Cross-platform colored terminal text." 
groups = ["default", "dev"] -marker = "sys_platform == \"win32\" or platform_system == \"Windows\"" files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, @@ -347,6 +515,50 @@ files = [ {file = "coverage-7.5.3.tar.gz", hash = "sha256:04aefca5190d1dc7a53a4c1a5a7f8568811306d7a8ee231c42fb69215571944f"}, ] +[[package]] +name = "cryptography" +version = "42.0.8" +requires_python = ">=3.7" +summary = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." +groups = ["default"] +dependencies = [ + "cffi>=1.12; platform_python_implementation != \"PyPy\"", +] +files = [ + {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:81d8a521705787afe7a18d5bfb47ea9d9cc068206270aad0b96a725022e18d2e"}, + {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:961e61cefdcb06e0c6d7e3a1b22ebe8b996eb2bf50614e89384be54c48c6b63d"}, + {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3ec3672626e1b9e55afd0df6d774ff0e953452886e06e0f1eb7eb0c832e8902"}, + {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e599b53fd95357d92304510fb7bda8523ed1f79ca98dce2f43c115950aa78801"}, + {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5226d5d21ab681f432a9c1cf8b658c0cb02533eece706b155e5fbd8a0cdd3949"}, + {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:6b7c4f03ce01afd3b76cf69a5455caa9cfa3de8c8f493e0d3ab7d20611c8dae9"}, + {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:2346b911eb349ab547076f47f2e035fc8ff2c02380a7cbbf8d87114fa0f1c583"}, + {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_x86_64.whl", hash = 
"sha256:ad803773e9df0b92e0a817d22fd8a3675493f690b96130a5e24f1b8fabbea9c7"}, + {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2f66d9cd9147ee495a8374a45ca445819f8929a3efcd2e3df6428e46c3cbb10b"}, + {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:d45b940883a03e19e944456a558b67a41160e367a719833c53de6911cabba2b7"}, + {file = "cryptography-42.0.8-cp37-abi3-win32.whl", hash = "sha256:a0c5b2b0585b6af82d7e385f55a8bc568abff8923af147ee3c07bd8b42cda8b2"}, + {file = "cryptography-42.0.8-cp37-abi3-win_amd64.whl", hash = "sha256:57080dee41209e556a9a4ce60d229244f7a66ef52750f813bfbe18959770cfba"}, + {file = "cryptography-42.0.8-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:dea567d1b0e8bc5764b9443858b673b734100c2871dc93163f58c46a97a83d28"}, + {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4783183f7cb757b73b2ae9aed6599b96338eb957233c58ca8f49a49cc32fd5e"}, + {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0608251135d0e03111152e41f0cc2392d1e74e35703960d4190b2e0f4ca9c70"}, + {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:dc0fdf6787f37b1c6b08e6dfc892d9d068b5bdb671198c72072828b80bd5fe4c"}, + {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:9c0c1716c8447ee7dbf08d6db2e5c41c688544c61074b54fc4564196f55c25a7"}, + {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fff12c88a672ab9c9c1cf7b0c80e3ad9e2ebd9d828d955c126be4fd3e5578c9e"}, + {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:cafb92b2bc622cd1aa6a1dce4b93307792633f4c5fe1f46c6b97cf67073ec961"}, + {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:31f721658a29331f895a5a54e7e82075554ccfb8b163a18719d342f5ffe5ecb1"}, + {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_x86_64.whl", hash = 
"sha256:b297f90c5723d04bcc8265fc2a0f86d4ea2e0f7ab4b6994459548d3a6b992a14"}, + {file = "cryptography-42.0.8-cp39-abi3-win32.whl", hash = "sha256:2f88d197e66c65be5e42cd72e5c18afbfae3f741742070e3019ac8f4ac57262c"}, + {file = "cryptography-42.0.8-cp39-abi3-win_amd64.whl", hash = "sha256:fa76fbb7596cc5839320000cdd5d0955313696d9511debab7ee7278fc8b5c84a"}, + {file = "cryptography-42.0.8-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ba4f0a211697362e89ad822e667d8d340b4d8d55fae72cdd619389fb5912eefe"}, + {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:81884c4d096c272f00aeb1f11cf62ccd39763581645b0812e99a91505fa48e0c"}, + {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c9bb2ae11bfbab395bdd072985abde58ea9860ed84e59dbc0463a5d0159f5b71"}, + {file = "cryptography-42.0.8-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7016f837e15b0a1c119d27ecd89b3515f01f90a8615ed5e9427e30d9cdbfed3d"}, + {file = "cryptography-42.0.8-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5a94eccb2a81a309806027e1670a358b99b8fe8bfe9f8d329f27d72c094dde8c"}, + {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dec9b018df185f08483f294cae6ccac29e7a6e0678996587363dc352dc65c842"}, + {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:343728aac38decfdeecf55ecab3264b015be68fc2816ca800db649607aeee648"}, + {file = "cryptography-42.0.8-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:013629ae70b40af70c9a7a5db40abe5d9054e6f4380e50ce769947b73bf3caad"}, + {file = "cryptography-42.0.8.tar.gz", hash = "sha256:8d09d05439ce7baa8e9e95b07ec5b6c886f548deb7e0f69ef25f64b3bce842f2"}, +] + [[package]] name = "cycler" version = "0.12.1" @@ -504,6 +716,31 @@ files = [ {file = "execnet-2.1.1.tar.gz", hash = "sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3"}, ] +[[package]] +name = "executing" +version = "1.2.0" +summary = "Get 
the currently executing AST node of a frame, and other information" +groups = ["default"] +files = [ + {file = "executing-1.2.0-py2.py3-none-any.whl", hash = "sha256:0314a69e37426e3608aada02473b4161d4caf5a4b244d1d0c48072b8fee7bacc"}, + {file = "executing-1.2.0.tar.gz", hash = "sha256:19da64c18d2d851112f09c287f8d3dbbdf725ab0e569077efb6cdcbd3497c107"}, +] + +[[package]] +name = "fabric" +version = "2.7.1" +summary = "High level SSH command execution" +groups = ["default"] +dependencies = [ + "invoke<2.0,>=1.3", + "paramiko>=2.4", + "pathlib2", +] +files = [ + {file = "fabric-2.7.1-py2.py3-none-any.whl", hash = "sha256:7610362318ef2d391cc65d4befb684393975d889ed5720f23499394ec0e136fa"}, + {file = "fabric-2.7.1.tar.gz", hash = "sha256:76f8fef59cf2061dbd849bbce4fe49bdd820884385004b0ca59136ac3db129e4"}, +] + [[package]] name = "farama-notifications" version = "0.0.4" @@ -670,6 +907,19 @@ files = [ {file = "gdown-5.2.0.tar.gz", hash = "sha256:2145165062d85520a3cd98b356c9ed522c5e7984d408535409fd46f94defc787"}, ] +[[package]] +name = "ghp-import" +version = "2.1.0" +summary = "Copy your docs directly to the gh-pages branch." 
+groups = ["dev"] +dependencies = [ + "python-dateutil>=2.8.1", +] +files = [ + {file = "ghp-import-2.1.0.tar.gz", hash = "sha256:9c535c4c61193c2df8871222567d7fd7e5014d835f97dc7b7439069e2413d343"}, + {file = "ghp_import-2.1.0-py3-none-any.whl", hash = "sha256:8337dd7b50877f163d4c0289bc1f1c7f127550241988d568c1db512c4324a619"}, +] + [[package]] name = "gitdb" version = "4.0.11" @@ -698,6 +948,22 @@ files = [ {file = "GitPython-3.1.43.tar.gz", hash = "sha256:35f314a9f878467f5453cc1fee295c3e18e52f1b99f10f6cf5b1682e968a9e7c"}, ] +[[package]] +name = "giving" +version = "0.4.2" +requires_python = ">=3.7,<4.0" +summary = "Reactive logging" +groups = ["default"] +dependencies = [ + "asttokens<3.0.0,>=2.2.1", + "reactivex<5.0.0,>=4.0.0", + "varname<0.11.0,>=0.10.0", +] +files = [ + {file = "giving-0.4.2-py3-none-any.whl", hash = "sha256:24c239fd6d3b58e38c1c847b7b553f8d8c9474f9f479341c56dead82facdf2fd"}, + {file = "giving-0.4.2.tar.gz", hash = "sha256:d122c73b4c7d6ba7da277ac104ef3fce24e0ba3ff165427032d6446a129563e8"}, +] + [[package]] name = "glfw" version = "2.7.0" @@ -715,6 +981,20 @@ files = [ {file = "glfw-2.7.0.tar.gz", hash = "sha256:0e209ad38fa8c5be67ca590d7b17533d95ad1eb57d0a3f07b98131db69b79000"}, ] +[[package]] +name = "griffe" +version = "0.47.0" +requires_python = ">=3.8" +summary = "Signatures for entire Python programs. Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API." 
+groups = ["dev"] +dependencies = [ + "colorama>=0.4", +] +files = [ + {file = "griffe-0.47.0-py3-none-any.whl", hash = "sha256:07a2fd6a8c3d21d0bbb0decf701d62042ccc8a576645c7f8799fe1f10de2b2de"}, + {file = "griffe-0.47.0.tar.gz", hash = "sha256:95119a440a3c932b13293538bdbc405bee4c36428547553dc6b327e7e7d35e5a"}, +] + [[package]] name = "grpcio" version = "1.64.1" @@ -862,7 +1142,7 @@ name = "idna" version = "3.7" requires_python = ">=3.5" summary = "Internationalized Domain Names in Applications (IDNA)" -groups = ["default"] +groups = ["default", "dev"] files = [ {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, @@ -939,6 +1219,16 @@ files = [ {file = "intel_openmp-2021.4.0-py2.py3-none-win_amd64.whl", hash = "sha256:eef4c8bcc8acefd7f5cd3b9384dbf73d59e2c99fc56545712ded913f43c4a94f"}, ] +[[package]] +name = "invoke" +version = "1.7.3" +summary = "Pythonic task execution" +groups = ["default"] +files = [ + {file = "invoke-1.7.3-py3-none-any.whl", hash = "sha256:d9694a865764dd3fd91f25f7e9a97fb41666e822bbb00e670091e3f43933574d"}, + {file = "invoke-1.7.3.tar.gz", hash = "sha256:41b428342d466a82135d5ab37119685a989713742be46e42a3a399d685579314"}, +] + [[package]] name = "itsdangerous" version = "2.2.0" @@ -1073,6 +1363,20 @@ files = [ {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, ] +[[package]] +name = "jinxed" +version = "1.2.1" +summary = "Jinxed Terminal Library" +groups = ["default"] +marker = "platform_system == \"Windows\"" +dependencies = [ + "ansicon; platform_system == \"Windows\"", +] +files = [ + {file = "jinxed-1.2.1-py2.py3-none-any.whl", hash = "sha256:37422659c4925969c66148c5e64979f553386a4226b9484d910d3094ced37d30"}, + {file = "jinxed-1.2.1.tar.gz", hash = 
"sha256:30c3f861b73279fea1ed928cfd4dfb1f273e16cd62c8a32acfac362da0f78f3f"}, +] + [[package]] name = "kiwisolver" version = "1.4.5" @@ -1157,7 +1461,7 @@ name = "markdown" version = "3.6" requires_python = ">=3.8" summary = "Python implementation of John Gruber's Markdown." -groups = ["default"] +groups = ["default", "dev"] files = [ {file = "Markdown-3.6-py3-none-any.whl", hash = "sha256:48f276f4d8cfb8ce6527c8f79e2ee29708508bf4d40aa410fbc3b4ee832c850f"}, {file = "Markdown-3.6.tar.gz", hash = "sha256:ed4f41f6daecbeeb96e576ce414c41d2d876daa9a16cb35fa8ed8c2ddfad0224"}, @@ -1239,6 +1543,224 @@ files = [ {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, ] +[[package]] +name = "mergedeep" +version = "1.3.4" +requires_python = ">=3.6" +summary = "A deep merge function for ๐Ÿ." +groups = ["dev"] +files = [ + {file = "mergedeep-1.3.4-py3-none-any.whl", hash = "sha256:70775750742b25c0d8f36c55aed03d24c3384d17c951b3175d898bd778ef0307"}, + {file = "mergedeep-1.3.4.tar.gz", hash = "sha256:0096d52e9dad9939c3d975a774666af186eda617e6ca84df4c94dec30004f2a8"}, +] + +[[package]] +name = "milatools" +version = "0.0.18" +requires_python = ">=3.7,<4.0" +summary = "Tools to work with the Mila cluster" +groups = ["default"] +dependencies = [ + "Fabric<3.0.0,>=2.7.0", + "blessed<2.0.0,>=1.18.1", + "coleo<0.4.0,>=0.3.0", + "questionary<2.0.0,>=1.10.0", + "sshconf<0.3.0,>=0.2.2", +] +files = [ + {file = "milatools-0.0.18-py3-none-any.whl", hash = "sha256:0483058e0a3491dd38c48e2d5d72eb0d30fe319165ccd808b3b9fcb018af05db"}, + {file = "milatools-0.0.18.tar.gz", hash = "sha256:84a41a93e41c142acec8919b602c4a74ca892bd6172be118bd1b1be815676801"}, +] + +[[package]] +name = "mkdocs" +version = "1.6.0" +requires_python = ">=3.8" +summary = "Project documentation with Markdown." 
+groups = ["dev"] +dependencies = [ + "click>=7.0", + "colorama>=0.4; platform_system == \"Windows\"", + "ghp-import>=1.0", + "jinja2>=2.11.1", + "markdown>=3.3.6", + "markupsafe>=2.0.1", + "mergedeep>=1.3.4", + "mkdocs-get-deps>=0.2.0", + "packaging>=20.5", + "pathspec>=0.11.1", + "pyyaml-env-tag>=0.1", + "pyyaml>=5.1", + "watchdog>=2.0", +] +files = [ + {file = "mkdocs-1.6.0-py3-none-any.whl", hash = "sha256:1eb5cb7676b7d89323e62b56235010216319217d4af5ddc543a91beb8d125ea7"}, + {file = "mkdocs-1.6.0.tar.gz", hash = "sha256:a73f735824ef83a4f3bcb7a231dcab23f5a838f88b7efc54a0eef5fbdbc3c512"}, +] + +[[package]] +name = "mkdocs-autorefs" +version = "1.0.1" +requires_python = ">=3.8" +summary = "Automatically link across pages in MkDocs." +groups = ["dev"] +dependencies = [ + "Markdown>=3.3", + "markupsafe>=2.0.1", + "mkdocs>=1.1", +] +files = [ + {file = "mkdocs_autorefs-1.0.1-py3-none-any.whl", hash = "sha256:aacdfae1ab197780fb7a2dac92ad8a3d8f7ca8049a9cbe56a4218cd52e8da570"}, + {file = "mkdocs_autorefs-1.0.1.tar.gz", hash = "sha256:f684edf847eced40b570b57846b15f0bf57fb93ac2c510450775dcf16accb971"}, +] + +[[package]] +name = "mkdocs-awesome-pages-plugin" +version = "2.9.2" +requires_python = ">=3.7" +summary = "An MkDocs plugin that simplifies configuring page titles and their order" +groups = ["dev"] +dependencies = [ + "mkdocs>=1", + "natsort>=8.1.0", + "wcmatch>=7", +] +files = [ + {file = "mkdocs_awesome_pages_plugin-2.9.2-py3-none-any.whl", hash = "sha256:9c795587695bd1ee85a8b7e43293005418df5a8b9ef296a3e628be427b693b4d"}, + {file = "mkdocs_awesome_pages_plugin-2.9.2.tar.gz", hash = "sha256:c3f7d366ecfe99b64524c49a84d8e13c576c19a918ea2e6f59bb486a259313af"}, +] + +[[package]] +name = "mkdocs-gen-files" +version = "0.5.0" +requires_python = ">=3.7" +summary = "MkDocs plugin to programmatically generate documentation pages during the build" +groups = ["dev"] +dependencies = [ + "mkdocs>=1.0.3", +] +files = [ + {file = "mkdocs_gen_files-0.5.0-py3-none-any.whl", hash = 
"sha256:7ac060096f3f40bd19039e7277dd3050be9a453c8ac578645844d4d91d7978ea"}, + {file = "mkdocs_gen_files-0.5.0.tar.gz", hash = "sha256:4c7cf256b5d67062a788f6b1d035e157fc1a9498c2399be9af5257d4ff4d19bc"}, +] + +[[package]] +name = "mkdocs-get-deps" +version = "0.2.0" +requires_python = ">=3.8" +summary = "MkDocs extension that lists all dependencies according to a mkdocs.yml file" +groups = ["dev"] +dependencies = [ + "mergedeep>=1.3.4", + "platformdirs>=2.2.0", + "pyyaml>=5.1", +] +files = [ + {file = "mkdocs_get_deps-0.2.0-py3-none-any.whl", hash = "sha256:2bf11d0b133e77a0dd036abeeb06dec8775e46efa526dc70667d8863eefc6134"}, + {file = "mkdocs_get_deps-0.2.0.tar.gz", hash = "sha256:162b3d129c7fad9b19abfdcb9c1458a651628e4b1dea628ac68790fb3061c60c"}, +] + +[[package]] +name = "mkdocs-literate-nav" +version = "0.6.1" +requires_python = ">=3.7" +summary = "MkDocs plugin to specify the navigation in Markdown instead of YAML" +groups = ["dev"] +dependencies = [ + "mkdocs>=1.0.3", +] +files = [ + {file = "mkdocs_literate_nav-0.6.1-py3-none-any.whl", hash = "sha256:e70bdc4a07050d32da79c0b697bd88e9a104cf3294282e9cb20eec94c6b0f401"}, + {file = "mkdocs_literate_nav-0.6.1.tar.gz", hash = "sha256:78a7ab6d878371728acb0cdc6235c9b0ffc6e83c997b037f4a5c6ff7cef7d759"}, +] + +[[package]] +name = "mkdocs-material" +version = "9.5.28" +requires_python = ">=3.8" +summary = "Documentation that simply works" +groups = ["dev"] +dependencies = [ + "babel~=2.10", + "colorama~=0.4", + "jinja2~=3.0", + "markdown~=3.2", + "mkdocs-material-extensions~=1.3", + "mkdocs~=1.6", + "paginate~=0.5", + "pygments~=2.16", + "pymdown-extensions~=10.2", + "regex>=2022.4", + "requests~=2.26", +] +files = [ + {file = "mkdocs_material-9.5.28-py3-none-any.whl", hash = "sha256:ff48b11b2a9f705dd210409ec3b418ab443dd36d96915bcba45a41f10ea27bfd"}, + {file = "mkdocs_material-9.5.28.tar.gz", hash = "sha256:9cba305283ad1600e3d0a67abe72d7a058b54793b47be39930911a588fe0336b"}, +] + +[[package]] +name = 
"mkdocs-material-extensions" +version = "1.3.1" +requires_python = ">=3.8" +summary = "Extension pack for Python Markdown and MkDocs Material." +groups = ["dev"] +files = [ + {file = "mkdocs_material_extensions-1.3.1-py3-none-any.whl", hash = "sha256:adff8b62700b25cb77b53358dad940f3ef973dd6db797907c49e3c2ef3ab4e31"}, + {file = "mkdocs_material_extensions-1.3.1.tar.gz", hash = "sha256:10c9511cea88f568257f960358a467d12b970e1f7b2c0e5fb2bb48cab1928443"}, +] + +[[package]] +name = "mkdocstrings" +version = "0.25.1" +requires_python = ">=3.8" +summary = "Automatic documentation from sources, for MkDocs." +groups = ["dev"] +dependencies = [ + "Jinja2>=2.11.1", + "Markdown>=3.3", + "MarkupSafe>=1.1", + "click>=7.0", + "mkdocs-autorefs>=0.3.1", + "mkdocs>=1.4", + "platformdirs>=2.2.0", + "pymdown-extensions>=6.3", +] +files = [ + {file = "mkdocstrings-0.25.1-py3-none-any.whl", hash = "sha256:da01fcc2670ad61888e8fe5b60afe9fee5781017d67431996832d63e887c2e51"}, + {file = "mkdocstrings-0.25.1.tar.gz", hash = "sha256:c3a2515f31577f311a9ee58d089e4c51fc6046dbd9e9b4c3de4c3194667fe9bf"}, +] + +[[package]] +name = "mkdocstrings-python" +version = "1.10.5" +requires_python = ">=3.8" +summary = "A Python handler for mkdocstrings." +groups = ["dev"] +dependencies = [ + "griffe>=0.47", + "mkdocstrings>=0.25", +] +files = [ + {file = "mkdocstrings_python-1.10.5-py3-none-any.whl", hash = "sha256:92e3c588ef1b41151f55281d075de7558dd8092e422cb07a65b18ee2b0863ebb"}, + {file = "mkdocstrings_python-1.10.5.tar.gz", hash = "sha256:acdc2a98cd9d46c7ece508193a16ca03ccabcb67520352b7449f84b57c162bdf"}, +] + +[[package]] +name = "mkdocstrings" +version = "0.25.1" +extras = ["python"] +requires_python = ">=3.8" +summary = "Automatic documentation from sources, for MkDocs." 
+groups = ["dev"] +dependencies = [ + "mkdocstrings-python>=0.5.2", + "mkdocstrings==0.25.1", +] +files = [ + {file = "mkdocstrings-0.25.1-py3-none-any.whl", hash = "sha256:da01fcc2670ad61888e8fe5b60afe9fee5781017d67431996832d63e887c2e51"}, + {file = "mkdocstrings-0.25.1.tar.gz", hash = "sha256:c3a2515f31577f311a9ee58d089e4c51fc6046dbd9e9b4c3de4c3194667fe9bf"}, +] + [[package]] name = "mkl" version = "2021.4.0" @@ -1411,6 +1933,28 @@ files = [ {file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"}, ] +[[package]] +name = "mypy-extensions" +version = "1.0.0" +requires_python = ">=3.5" +summary = "Type system extensions for programs checked with the mypy type checker." +groups = ["dev"] +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "natsort" +version = "8.4.0" +requires_python = ">=3.7" +summary = "Simple yet flexible natural sorting in Python." 
+groups = ["dev"] +files = [ + {file = "natsort-8.4.0-py3-none-any.whl", hash = "sha256:4732914fb471f56b5cce04d7bae6f164a592c7712e1c85f9ef585e197299521c"}, + {file = "natsort-8.4.0.tar.gz", hash = "sha256:45312c4a0e5507593da193dedd04abb1469253b601ecaf63445ad80f0a1ea581"}, +] + [[package]] name = "nest-asyncio" version = "1.6.0" @@ -1694,6 +2238,15 @@ files = [ {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, ] +[[package]] +name = "paginate" +version = "0.5.6" +summary = "Divides large result sets into pages for easier browsing" +groups = ["dev"] +files = [ + {file = "paginate-0.5.6.tar.gz", hash = "sha256:5e6007b6a9398177a7e1648d04fdd9f8c9766a1a945bceac82f1929e8c78af2d"}, +] + [[package]] name = "pandas" version = "2.2.2" @@ -1717,6 +2270,46 @@ files = [ {file = "pandas-2.2.2.tar.gz", hash = "sha256:9e79019aba43cb4fda9e4d983f8e88ca0373adbb697ae9c6c43093218de28b54"}, ] +[[package]] +name = "paramiko" +version = "3.4.0" +requires_python = ">=3.6" +summary = "SSH2 protocol library" +groups = ["default"] +dependencies = [ + "bcrypt>=3.2", + "cryptography>=3.3", + "pynacl>=1.5", +] +files = [ + {file = "paramiko-3.4.0-py3-none-any.whl", hash = "sha256:43f0b51115a896f9c00f59618023484cb3a14b98bbceab43394a39c6739b7ee7"}, + {file = "paramiko-3.4.0.tar.gz", hash = "sha256:aac08f26a31dc4dffd92821527d1682d99d52f9ef6851968114a8728f3c274d3"}, +] + +[[package]] +name = "pathlib2" +version = "2.3.7.post1" +summary = "Object-oriented filesystem paths" +groups = ["default"] +dependencies = [ + "six", +] +files = [ + {file = "pathlib2-2.3.7.post1-py2.py3-none-any.whl", hash = "sha256:5266a0fd000452f1b3467d782f079a4343c63aaa119221fbdc4e39577489ca5b"}, + {file = "pathlib2-2.3.7.post1.tar.gz", hash = "sha256:9fe0edad898b83c0c3e199c842b27ed216645d2e177757b2dd67384d4113c641"}, +] + +[[package]] +name = "pathspec" +version = "0.12.1" +requires_python = ">=3.8" +summary = "Utility library for gitignore style pattern 
matching of file paths." +groups = ["dev"] +files = [ + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, +] + [[package]] name = "pillow" version = "10.3.0" @@ -1757,7 +2350,7 @@ name = "platformdirs" version = "4.2.2" requires_python = ">=3.8" summary = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." -groups = ["default"] +groups = ["default", "dev"] files = [ {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, @@ -1787,6 +2380,20 @@ files = [ {file = "proglog-0.1.10.tar.gz", hash = "sha256:658c28c9c82e4caeb2f25f488fff9ceace22f8d69b15d0c1c86d64275e4ddab4"}, ] +[[package]] +name = "prompt-toolkit" +version = "3.0.47" +requires_python = ">=3.7.0" +summary = "Library for building powerful interactive command lines in Python" +groups = ["default"] +dependencies = [ + "wcwidth", +] +files = [ + {file = "prompt_toolkit-3.0.47-py3-none-any.whl", hash = "sha256:0d7bfa67001d5e39d02c224b663abc33687405033a8c422d0d675a5a13361d10"}, + {file = "prompt_toolkit-3.0.47.tar.gz", hash = "sha256:1e1b29cb58080b1e69f207c893a1a7bf16d127a5c30c9d17a25a5d77792e5360"}, +] + [[package]] name = "protobuf" version = "4.25.3" @@ -1820,6 +2427,21 @@ files = [ {file = "psutil-6.0.0.tar.gz", hash = "sha256:8faae4f310b6d969fa26ca0545338b21f73c6b15db7c4a8d934a5482faa818f2"}, ] +[[package]] +name = "ptera" +version = "1.4.1" +requires_python = ">=3.7,<4.0" +summary = "Call graph addressing library." 
+groups = ["default"] +dependencies = [ + "codefind<0.2.0,>=0.1.2; python_version ~= \"3.8\"", + "giving<0.5.0,>=0.4.1", +] +files = [ + {file = "ptera-1.4.1-py3-none-any.whl", hash = "sha256:91b2d813b5a5534538d2f87452029194808e8bf415f834bd47e010008c2b5d21"}, + {file = "ptera-1.4.1.tar.gz", hash = "sha256:ef54756245008cbd60a272312dc4bd0b7a93cd9c7b5963c198cb4ec18a8bd10a"}, +] + [[package]] name = "py-cpuinfo" version = "9.0.0" @@ -1830,6 +2452,17 @@ files = [ {file = "py_cpuinfo-9.0.0-py3-none-any.whl", hash = "sha256:859625bc251f64e21f077d099d4162689c762b5d6a4c3c97553d56241c9674d5"}, ] +[[package]] +name = "pycparser" +version = "2.22" +requires_python = ">=3.8" +summary = "C parser in Python" +groups = ["default"] +files = [ + {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, + {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, +] + [[package]] name = "pydantic" version = "2.7.4" @@ -1919,12 +2552,49 @@ name = "pygments" version = "2.18.0" requires_python = ">=3.8" summary = "Pygments is a syntax highlighting package written in Python." -groups = ["default"] +groups = ["default", "dev"] files = [ {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, ] +[[package]] +name = "pymdown-extensions" +version = "10.8.1" +requires_python = ">=3.8" +summary = "Extension pack for Python Markdown." 
+groups = ["dev"] +dependencies = [ + "markdown>=3.6", + "pyyaml", +] +files = [ + {file = "pymdown_extensions-10.8.1-py3-none-any.whl", hash = "sha256:f938326115884f48c6059c67377c46cf631c733ef3629b6eed1349989d1b30cb"}, + {file = "pymdown_extensions-10.8.1.tar.gz", hash = "sha256:3ab1db5c9e21728dabf75192d71471f8e50f216627e9a1fa9535ecb0231b9940"}, +] + +[[package]] +name = "pynacl" +version = "1.5.0" +requires_python = ">=3.6" +summary = "Python binding to the Networking and Cryptography (NaCl) library" +groups = ["default"] +dependencies = [ + "cffi>=1.4.1", +] +files = [ + {file = "PyNaCl-1.5.0-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:401002a4aaa07c9414132aaed7f6836ff98f59277a234704ff66878c2ee4a0d1"}, + {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:52cb72a79269189d4e0dc537556f4740f7f0a9ec41c1322598799b0bdad4ef92"}, + {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a36d4a9dda1f19ce6e03c9a784a2921a4b726b02e1c736600ca9c22029474394"}, + {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0c84947a22519e013607c9be43706dd42513f9e6ae5d39d3613ca1e142fba44d"}, + {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06b8f6fa7f5de8d5d2f7573fe8c863c051225a27b61e6860fd047b1775807858"}, + {file = "PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a422368fc821589c228f4c49438a368831cb5bbc0eab5ebe1d7fac9dded6567b"}, + {file = "PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:61f642bf2378713e2c2e1de73444a3778e5f0a38be6fee0fe532fe30060282ff"}, + {file = "PyNaCl-1.5.0-cp36-abi3-win32.whl", hash = "sha256:e46dae94e34b085175f8abb3b0aaa7da40767865ac82c928eeb9e57e1ea8a543"}, + {file = "PyNaCl-1.5.0-cp36-abi3-win_amd64.whl", hash = "sha256:20f42270d27e1b6a29f54032090b972d97f0a1b0948cc52392041ef7831fee93"}, + {file = 
"PyNaCl-1.5.0.tar.gz", hash = "sha256:8ac7448f09ab85811607bdd21ec2464495ac8b7c66d146bf545b0f08fb9220ba"}, +] + [[package]] name = "pyopengl" version = "3.1.7" @@ -2111,7 +2781,7 @@ name = "python-dateutil" version = "2.9.0.post0" requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" summary = "Extensions to the standard Python datetime module" -groups = ["default"] +groups = ["default", "dev"] dependencies = [ "six>=1.5", ] @@ -2197,12 +2867,79 @@ files = [ {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, ] +[[package]] +name = "pyyaml-env-tag" +version = "0.1" +requires_python = ">=3.6" +summary = "A custom YAML tag for referencing environment variables in YAML files. " +groups = ["dev"] +dependencies = [ + "pyyaml", +] +files = [ + {file = "pyyaml_env_tag-0.1-py3-none-any.whl", hash = "sha256:af31106dec8a4d68c60207c1886031cbf839b68aa7abccdb19868200532c2069"}, + {file = "pyyaml_env_tag-0.1.tar.gz", hash = "sha256:70092675bda14fdec33b31ba77e7543de9ddc88f2e5b99160396572d11525bdb"}, +] + +[[package]] +name = "questionary" +version = "1.10.0" +requires_python = ">=3.6,<4.0" +summary = "Python library to build pretty command line user prompts โญ๏ธ" +groups = ["default"] +dependencies = [ + "prompt-toolkit<4.0,>=2.0", +] +files = [ + {file = "questionary-1.10.0-py3-none-any.whl", hash = "sha256:fecfcc8cca110fda9d561cb83f1e97ecbb93c613ff857f655818839dac74ce90"}, + {file = "questionary-1.10.0.tar.gz", hash = "sha256:600d3aefecce26d48d97eee936fdb66e4bc27f934c3ab6dd1e292c4f43946d90"}, +] + +[[package]] +name = "reactivex" +version = "4.0.4" +requires_python = ">=3.7,<4.0" +summary = "ReactiveX (Rx) for Python" +groups = ["default"] +dependencies = [ + "typing-extensions<5.0.0,>=4.1.1", +] +files = [ + {file = "reactivex-4.0.4-py3-none-any.whl", hash = "sha256:0004796c420bd9e68aad8e65627d85a8e13f293de76656165dffbcb3a0e3fb6a"}, + {file = "reactivex-4.0.4.tar.gz", hash = 
"sha256:e912e6591022ab9176df8348a653fe8c8fa7a301f26f9931c9d8c78a650e04e8"}, +] + +[[package]] +name = "regex" +version = "2024.5.15" +requires_python = ">=3.8" +summary = "Alternative regular expression module, to replace re." +groups = ["dev"] +files = [ + {file = "regex-2024.5.15-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:632b01153e5248c134007209b5c6348a544ce96c46005d8456de1d552455b014"}, + {file = "regex-2024.5.15-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e64198f6b856d48192bf921421fdd8ad8eb35e179086e99e99f711957ffedd6e"}, + {file = "regex-2024.5.15-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68811ab14087b2f6e0fc0c2bae9ad689ea3584cad6917fc57be6a48bbd012c49"}, + {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8ec0c2fea1e886a19c3bee0cd19d862b3aa75dcdfb42ebe8ed30708df64687a"}, + {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d0c0c0003c10f54a591d220997dd27d953cd9ccc1a7294b40a4be5312be8797b"}, + {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2431b9e263af1953c55abbd3e2efca67ca80a3de8a0437cb58e2421f8184717a"}, + {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a605586358893b483976cffc1723fb0f83e526e8f14c6e6614e75919d9862cf"}, + {file = "regex-2024.5.15-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:391d7f7f1e409d192dba8bcd42d3e4cf9e598f3979cdaed6ab11288da88cb9f2"}, + {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9ff11639a8d98969c863d4617595eb5425fd12f7c5ef6621a4b74b71ed8726d5"}, + {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4eee78a04e6c67e8391edd4dad3279828dd66ac4b79570ec998e2155d2e59fd5"}, + {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = 
"sha256:8fe45aa3f4aa57faabbc9cb46a93363edd6197cbc43523daea044e9ff2fea83e"}, + {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:d0a3d8d6acf0c78a1fff0e210d224b821081330b8524e3e2bc5a68ef6ab5803d"}, + {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c486b4106066d502495b3025a0a7251bf37ea9540433940a23419461ab9f2a80"}, + {file = "regex-2024.5.15-cp312-cp312-win32.whl", hash = "sha256:c49e15eac7c149f3670b3e27f1f28a2c1ddeccd3a2812cba953e01be2ab9b5fe"}, + {file = "regex-2024.5.15-cp312-cp312-win_amd64.whl", hash = "sha256:673b5a6da4557b975c6c90198588181029c60793835ce02f497ea817ff647cb2"}, + {file = "regex-2024.5.15.tar.gz", hash = "sha256:d3ee02d9e5f482cc8309134a91eeaacbdd2261ba111b0fef3748eeb4913e6a2c"}, +] + [[package]] name = "requests" version = "2.32.3" requires_python = ">=3.8" summary = "Python HTTP for Humans." -groups = ["default"] +groups = ["default", "dev"] dependencies = [ "certifi>=2017.4.17", "charset-normalizer<4,>=2", @@ -2390,7 +3127,7 @@ name = "six" version = "1.16.0" requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" summary = "Python 2 and 3 compatibility utilities" -groups = ["default"] +groups = ["default", "dev"] files = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, @@ -2418,6 +3155,17 @@ files = [ {file = "soupsieve-2.5.tar.gz", hash = "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690"}, ] +[[package]] +name = "sshconf" +version = "0.2.6" +requires_python = ">=3.5" +summary = "Lightweight SSH config library." 
+groups = ["default"] +files = [ + {file = "sshconf-0.2.6-py3-none-any.whl", hash = "sha256:6fbeef5d4a3ec3ae16867e0ab06ace34ea0f10a4eb294dd865ba400a8668885f"}, + {file = "sshconf-0.2.6.tar.gz", hash = "sha256:7f6c4fad4a943d9ab0cd6ccf57e67f0f691a06e8bbc8614437bfc84e02ca3943"}, +] + [[package]] name = "submitit" version = "1.5.1" @@ -2689,12 +3437,26 @@ name = "urllib3" version = "2.2.2" requires_python = ">=3.8" summary = "HTTP library with thread-safe connection pooling, file post, and more." -groups = ["default"] +groups = ["default", "dev"] files = [ {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, ] +[[package]] +name = "varname" +version = "0.10.0" +requires_python = ">=3.6,<4.0" +summary = "Dark magics about variable names in python." +groups = ["default"] +dependencies = [ + "executing<2.0,>=1.1", +] +files = [ + {file = "varname-0.10.0-py3-none-any.whl", hash = "sha256:20748d5cd3e125350726cd39d2cbd0e3000f30b3e0d3d5fe827efa0e71729809"}, + {file = "varname-0.10.0.tar.gz", hash = "sha256:045f7a409b3e91a760ab10a5539aabbb292db9d685f3011920b85fd4dbc5b9e3"}, +] + [[package]] name = "wandb" version = "0.17.2" @@ -2725,6 +3487,59 @@ files = [ {file = "wandb-0.17.2-py3-none-win_amd64.whl", hash = "sha256:62cd707f38b5711971729dae80343b8c35f6003901e690166cc6d526187a9785"}, ] +[[package]] +name = "watchdog" +version = "4.0.1" +requires_python = ">=3.8" +summary = "Filesystem events monitoring" +groups = ["dev"] +files = [ + {file = "watchdog-4.0.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:093b23e6906a8b97051191a4a0c73a77ecc958121d42346274c6af6520dec175"}, + {file = "watchdog-4.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:611be3904f9843f0529c35a3ff3fd617449463cb4b73b1633950b3d97fa4bfb7"}, + {file = "watchdog-4.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:62c613ad689ddcb11707f030e722fa929f322ef7e4f18f5335d2b73c61a85c28"}, + {file = "watchdog-4.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0144c0ea9997b92615af1d94afc0c217e07ce2c14912c7b1a5731776329fcfc7"}, + {file = "watchdog-4.0.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:998d2be6976a0ee3a81fb8e2777900c28641fb5bfbd0c84717d89bca0addcdc5"}, + {file = "watchdog-4.0.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e7921319fe4430b11278d924ef66d4daa469fafb1da679a2e48c935fa27af193"}, + {file = "watchdog-4.0.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:f0de0f284248ab40188f23380b03b59126d1479cd59940f2a34f8852db710625"}, + {file = "watchdog-4.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:bca36be5707e81b9e6ce3208d92d95540d4ca244c006b61511753583c81c70dd"}, + {file = "watchdog-4.0.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:ab998f567ebdf6b1da7dc1e5accfaa7c6992244629c0fdaef062f43249bd8dee"}, + {file = "watchdog-4.0.1-py3-none-manylinux2014_aarch64.whl", hash = "sha256:dddba7ca1c807045323b6af4ff80f5ddc4d654c8bce8317dde1bd96b128ed253"}, + {file = "watchdog-4.0.1-py3-none-manylinux2014_armv7l.whl", hash = "sha256:4513ec234c68b14d4161440e07f995f231be21a09329051e67a2118a7a612d2d"}, + {file = "watchdog-4.0.1-py3-none-manylinux2014_i686.whl", hash = "sha256:4107ac5ab936a63952dea2a46a734a23230aa2f6f9db1291bf171dac3ebd53c6"}, + {file = "watchdog-4.0.1-py3-none-manylinux2014_ppc64.whl", hash = "sha256:6e8c70d2cd745daec2a08734d9f63092b793ad97612470a0ee4cbb8f5f705c57"}, + {file = "watchdog-4.0.1-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:f27279d060e2ab24c0aa98363ff906d2386aa6c4dc2f1a374655d4e02a6c5e5e"}, + {file = "watchdog-4.0.1-py3-none-manylinux2014_s390x.whl", hash = "sha256:f8affdf3c0f0466e69f5b3917cdd042f89c8c63aebdb9f7c078996f607cdb0f5"}, + {file = "watchdog-4.0.1-py3-none-manylinux2014_x86_64.whl", hash = "sha256:ac7041b385f04c047fcc2951dc001671dee1b7e0615cde772e84b01fbf68ee84"}, + 
{file = "watchdog-4.0.1-py3-none-win32.whl", hash = "sha256:206afc3d964f9a233e6ad34618ec60b9837d0582b500b63687e34011e15bb429"}, + {file = "watchdog-4.0.1-py3-none-win_amd64.whl", hash = "sha256:7577b3c43e5909623149f76b099ac49a1a01ca4e167d1785c76eb52fa585745a"}, + {file = "watchdog-4.0.1-py3-none-win_ia64.whl", hash = "sha256:d7b9f5f3299e8dd230880b6c55504a1f69cf1e4316275d1b215ebdd8187ec88d"}, + {file = "watchdog-4.0.1.tar.gz", hash = "sha256:eebaacf674fa25511e8867028d281e602ee6500045b57f43b08778082f7f8b44"}, +] + +[[package]] +name = "wcmatch" +version = "8.5.2" +requires_python = ">=3.8" +summary = "Wildcard/glob file name matcher." +groups = ["dev"] +dependencies = [ + "bracex>=2.1.1", +] +files = [ + {file = "wcmatch-8.5.2-py3-none-any.whl", hash = "sha256:17d3ad3758f9d0b5b4dedc770b65420d4dac62e680229c287bf24c9db856a478"}, + {file = "wcmatch-8.5.2.tar.gz", hash = "sha256:a70222b86dea82fb382dd87b73278c10756c138bd6f8f714e2183128887b9eb2"}, +] + +[[package]] +name = "wcwidth" +version = "0.2.13" +summary = "Measures the displayed width of unicode strings in a terminal" +groups = ["default"] +files = [ + {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, + {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, +] + [[package]] name = "werkzeug" version = "3.0.3" diff --git a/project/__init__.py b/project/__init__.py index b71fe767..9a0b0091 100644 --- a/project/__init__.py +++ b/project/__init__.py @@ -1,18 +1,29 @@ -from .algorithms import Algorithm, ExampleAlgorithm, ManualGradientsExample, NoOp +from . 
import algorithms, configs, datamodules, experiment, main, networks, utils +from .algorithms import Algorithm from .configs import Config -from .datamodules import VisionDataModule -from .datamodules.image_classification.image_classification import ImageClassificationDataModule from .experiment import Experiment -from .networks import FcNet + +# from .networks import FcNet +from .utils.types import DataModule __all__ = [ + "algorithms", + "experiment", + "main", + "Experiment", + "configs", + "datamodules", + "networks", "Algorithm", - "ExampleAlgorithm", - "ManualGradientsExample", - "NoOp", + "DataModule", + "utils", + # "ExampleAlgorithm", + # "ManualGradientsExample", + # "NoOp", "Config", - "ImageClassificationDataModule", - "VisionDataModule", + # "ImageClassificationDataModule", + "DataModule", + # "VisionDataModule", "Experiment", - "FcNet", + # "FcNet", ] diff --git a/project/cluster_test.py b/project/cluster_test.py deleted file mode 100644 index 07e776cb..00000000 --- a/project/cluster_test.py +++ /dev/null @@ -1,520 +0,0 @@ -"""Idea: Use `submitit` to test that the setup works for this repo on the current cluster. - -TODOs/ideas: -- Create a fixture that scancel's the jobs if I KeyboardInterrupt the test. 
-""" - -from __future__ import annotations - -import math -import os -import shutil -import subprocess -import warnings -from collections.abc import Callable, Mapping -from dataclasses import fields, is_dataclass -from typing import Any, Literal, overload - -import pytest -from hydra import initialize -from hydra._internal.callbacks import Callbacks -from hydra.core.global_hydra import GlobalHydra -from hydra.core.utils import JobReturn, JobStatus, run_job -from hydra.plugins.sweeper import Sweeper -from hydra.types import HydraContext, RunMode -from hydra_zen import instantiate -from hydra_zen._launch import _NotSet, _store_config -from omegaconf import OmegaConf - -from project.configs.config import Config -from project.main import main -from project.utils.types import Dataclass - -PROJECT_NAME = "project" # TODO: Change this to the name of your project. -TEST_JOB_NAME = "cluster_test" - -# TODO: get output of savail instead. -gpu_types = [ - "1g.10gb", # MIG-ed A100 GPU - "2g.20gb", # MIG-ed A100 GPU - "3g.40gb", # MIG-ed A100 GPU - # "a100", - # "a100l", # Note: needs a reservation. - # "a6000", - "rtx8000", - pytest.param( - "v100", - marks=[ - pytest.mark.xfail(reason="Can take a while to schedule"), - pytest.mark.timeout(120), - ], - ), -] -gpu_types = [None] - -pytestmark = pytest.mark.skipif( - not shutil.which("sbatch"), reason="Needs to be run on a SLURM cluster." -) - - -@pytest.fixture(autouse=True, scope="session") -def scancel_jobs_after_tests(): - yield - # TODO: run scancel over ssh to the cluster if running locally. 
- if shutil.which("scancel"): - username = os.environ["USER"] - subprocess.check_call(["scancel", "-u", username, "--name", TEST_JOB_NAME]) - - -@pytest.mark.skipif("SLURM_JOB_ID" not in os.environ, reason="Not running on a SLURM cluster") -@pytest.mark.parametrize("nodes", [1]) -@pytest.mark.parametrize("gpus", [1, 2]) -@pytest.mark.parametrize("gpu_type", gpu_types) -def test_launch_job_on_cluster(nodes: int, gpus: int, gpu_type: str | None) -> None: - """Test that we can auto-pack multiple jobs on a single GPU.""" - - jobs_per_gpu = 2 - - cpus_per_gpu = 2 - mem_per_node_gb = 16 - assert gpus % nodes == 0 - gpus_per_node = gpus // nodes - mem_per_gpu_gb = max(1, math.floor(mem_per_node_gb / gpus_per_node)) - - if gpus > 1 and gpu_type and "." in gpu_type: - # NOTE: Is it possible that this would actually work? - pytest.skip("Not launching multi-gpu jobs using MIG.") - - overrides = [ - "name=test", - "algorithm=example_algo", - "datamodule=cifar10", - "experiment=overfit_one_batch", - "trainer.max_epochs=1", - # This 'resources' group is where most of the configuration options for the slurm - # launcher are. Here we just overwrite some of them. - # For more info, check out project/configs/resources/one_gpu.yaml - "resources=one_gpu", - # Overrides compared to `one_gpu.yaml`: - # TODO: Pack more than one job on a single GPU. - # Jobs should last less than 10 minutes. (actually more like 1 minute, but making it - # simple.) - "hydra.launcher.additional_parameters.time=0-00:10:00", - f"hydra.launcher.nodes={nodes}", - f"hydra.launcher.tasks_per_node={gpus_per_node * jobs_per_gpu}", # a.k.a. 
ntasks_per_node - f"hydra.launcher.cpus_per_task={min(1, cpus_per_gpu // jobs_per_gpu)}", - "hydra.launcher.gres=" + (f"gpu:{gpu_type}:1" if gpu_type is not None else "gpu:1"), - f"hydra.launcher.gres=gpu:{gpu_type}:1", - f"hydra.launcher.mem_per_gpu={mem_per_gpu_gb}G", - # f"hydra.launcher.mem_gb={mem_gb}", - f"trainer.devices={gpus}", - ] - - distributed_training = gpus > 1 or nodes > 1 - - if gpu_type == "" and distributed_training: - # Avoid the nodes with MIG-ed GPUs when asking for "any" GPU in a distributed setting. - overrides.append("hydra.launcher.additional_parameters.exclude=cn-g[005-012,017-026]") - if distributed_training: - overrides.append("+trainer.strategy=ddp") - if nodes > 1: - overrides.append(f"trainer.num_nodes={nodes}") # TODO: Actually test nodes > 1 - - # Run the job directly on the current node: - # output = main(config) - config_path = "configs" - output = launch( - Config, - task_function=main, - overrides=overrides, - multirun=True, - config_name="config", - job_name=TEST_JOB_NAME, - config_path=config_path, - caller_stack_depth=2, - ) - job_outputs = output - assert len(job_outputs) == 1 - assert len(job_outputs[0]) == 1 - job_output = job_outputs[0][0] - assert job_output.status is JobStatus.COMPLETED - job_val_classification_error = job_output.return_value - assert isinstance(job_val_classification_error, float) - assert 0 <= job_val_classification_error <= 1 - # options = OmegaConf.to_object(config) - - -def test_packing_runs_in_one_job() -> None: - """Test that we can pack multiple runs in a single job (on one GPU).""" - config_path = "configs" - - nodes = 1 - gpus = 1 - # gpu_type = "1g.10gb" - gpu_type = None - cpus_per_gpu = 2 - mem_per_node_gb = 16 - assert gpus % nodes == 0 - gpus_per_node = gpus // nodes - mem_per_gpu_gb = max(1, math.floor(mem_per_node_gb / gpus_per_node)) - - if gpus > 1 and gpu_type and "." in gpu_type: - # NOTE: Is it possible that this would actually work? 
- pytest.skip("Not launching multi-gpu jobs using MIG.") - - overrides = [ - "name=test", - "algorithm=example_algo", - "datamodule=cifar10", - "experiment=overfit_one_batch", - "trainer.max_epochs=1", - # This 'resources' group is where most of the configuration options for the slurm - # launcher are. Here we just overwrite some of them. - # For more info, check out project/configs/resources/one_gpu.yaml - "resources=one_gpu", - # Overrides compared to `one_gpu.yaml`: - # TODO: Pack more than one job on a single GPU. - # Jobs should last less than 10 minutes. (actually more like 1 minute, but making it - # simple.) - "hydra.launcher.additional_parameters.time=0-00:10:00", - f"hydra.launcher.nodes={nodes}", - f"hydra.launcher.tasks_per_node={gpus_per_node}", # a.k.a. ntasks_per_node - f"hydra.launcher.cpus_per_task={cpus_per_gpu}", - "hydra.launcher.gpus_per_task=" + (f"{gpu_type}:1" if gpu_type is not None else "1"), - f"hydra.launcher.mem_per_gpu={mem_per_gpu_gb}G", - # f"hydra.launcher.mem_gb={mem_gb}", - f"trainer.devices={gpus}", - ] - - distributed_training = gpus > 1 or nodes > 1 - - if gpu_type == "" and distributed_training: - # Avoid the nodes with MIG-ed GPUs when asking for "any" GPU in a distributed setting. 
- overrides.append("hydra.launcher.additional_parameters.exclude=cn-g[005-012,017-026]") - if distributed_training: - overrides.append("+trainer.strategy=ddp") - if nodes > 1: - overrides.append(f"trainer.num_nodes={nodes}") # TODO: Actually test nodes > 1 - - # Run the job directly on the current node: - # output = main(config) - - output = launch( - Config, - task_function=main, - overrides=overrides, - multirun=True, - config_name="config", - job_name=TEST_JOB_NAME, - config_path=config_path, - caller_stack_depth=2, - ) - job_outputs = output - assert len(job_outputs) == 1 - assert len(job_outputs[0]) == 1 - job_output = job_outputs[0][0] - assert job_output.status is JobStatus.COMPLETED - job_val_classification_error = job_output.return_value - assert isinstance(job_val_classification_error, float) - assert 0 <= job_val_classification_error <= 1 - # options = OmegaConf.to_object(config) - - -@overload -def launch( - config: Dataclass | type[Dataclass] | Mapping[str, Any], - task_function: Callable[[Any], Any], - overrides: list[str] | None = None, - version_base: str | type[_NotSet] | None = _NotSet, - to_dictconfig: bool = False, - config_name: str = "zen_launch", - job_name: str = "zen_launch", - with_log_configuration: bool = True, - # This changes: - multirun: Literal[True] = True, - # Added parameters: - config_path: str | None = None, - caller_stack_depth: int = 2, -) -> list[list[JobReturn]]: ... - - -@overload -def launch( - config: Dataclass | type[Dataclass] | Mapping[str, Any], - task_function: Callable[[Any], Any], - overrides: list[str] | None = None, - version_base: str | type[_NotSet] | None = _NotSet, - to_dictconfig: bool = False, - config_name: str = "zen_launch", - job_name: str = "zen_launch", - with_log_configuration: bool = True, - # This changes: - multirun: Literal[False] = False, - # Added parameters: - config_path: str | None = None, - caller_stack_depth: int = 2, -) -> JobReturn: ... 
- - -# NOTE: This is a copied and slightly modified version of `launch` from `hydra_zen._launch` to add -# the `config_path` and `caller_stack_depth` parameters. -def launch( - config: Dataclass | type[Dataclass] | Mapping[str, Any], - task_function: Callable[[Any], Any], - overrides: list[str] | None = None, - version_base: str | type[_NotSet] | None = "1.2", - to_dictconfig: bool = False, - config_name: str = "zen_launch", - job_name: str = "zen_launch", - with_log_configuration: bool = True, - multirun: bool = False, - # Added parameters: - config_path: str | None = None, - caller_stack_depth: int = 2, -) -> JobReturn | list[list[JobReturn]]: - r"""Launch a Hydra job using a Python-based interface. - - `launch` is designed to closely match the interface of the standard Hydra CLI. - For example, launching a Hydra job from the CLI via:: - - $ python my_task.py job/group=group_name job.group.param=1 - - corresponds to the following usage of `launch`: - - >>> job = launch( # doctest: +SKIP - ... config, - ... task_function, - ... overrides=["job/group=group_name", "job.group.param=1"], - ... ) - - Parameters - ---------- - config : Dataclass | Type[Dataclass] | Mapping[str, Any] - A config that will be passed to ``task_function``. - - task_function : Callable[[DictConfig], Any] - The function that Hydra will execute. Its input will be ``config``, which - has been modified via the specified ``overrides`` - - overrides : Optional[List[str]] - If provided, sets/overrides values in ``config``. See [1]_ and [2]_ - for a detailed discussion of the "grammar" supported by ``overrides``. - - multirun : bool (default: False) - Launch a Hydra multi-run ([3]_). - - version_base : Optional[str], optional (default=_NotSet) - Available starting with Hydra 1.2.0. - - If the `version_base parameter` is not specified, Hydra 1.x will use defaults compatible - with version 1.1. Also in this case, a warning is issued to indicate an explicit - version_base is preferred. 
- - If the `version_base parameter` is `None`, then the defaults are chosen for the current - minor Hydra version. For example for Hydra 1.2, then would imply `config_path=None` and - `hydra.job.chdir=False`. - - If the `version_base` parameter is an explicit version string like "1.1", then the - defaults appropriate to that version are used. - - to_dictconfig: bool (default: False) - If ``True``, convert a ``dataclasses.dataclass`` to a ``omegaconf.DictConfig``. Note, this - will remove Hydra's cabability for validation with structured configurations. - - config_name : str (default: "zen_launch") - Name of the stored configuration in Hydra's ConfigStore API. - - job_name : str (default: "zen_launch") - - with_log_configuration : bool (default: True) - If ``True``, enables the configuration of the logging subsystem from the loaded config. - - Returns - ------- - result : JobReturn | Any - If ``multirun is False``: - A ``JobReturn`` object storing the results of the Hydra experiment via the following - attributes - - ``cfg``: Reflects ``config`` - - ``overrides``: Reflects ``overrides`` - - ``return_value``: The return value of the task function - - ``hydra_cfg``: The Hydra configuration object - - ``working_dir``: The experiment working directory - - ``task_name``: The task name of the Hydra job - - ``status``: A ``JobStatus`` enum reporting whether or not the job completed - successfully - Else: - Return values of all launched jobs (depends on the Sweeper implementation). - - References - ---------- - .. [1] https://hydra.cc/docs/advanced/override_grammar/basic - .. [2] https://hydra.cc/docs/configure_hydra/intro - .. [3] https://hydra.cc/docs/tutorials/basic/running_your_app/multi-run - - Examples - -------- - - **Basic usage** - - Let's define and launch a trivial Hydra app. - - >>> from hydra_zen import make_config, launch, to_yaml # doctest: +SKIP - - First, we will define a config, which determines the configurable interface to our - "app". 
For the purpose of example, we'll design the "interface" of this config to accept - two configurable parameters: ``a`` and ``b``. - - >>> Conf = make_config("a", "b") # doctest: +SKIP - - Our task function accepts the config as an input and uses it to run some generic functionality. - For simplicity's sake, let's design this task function to: convert the job's config to a - yaml-formatted string, print it, and then return the string. - - >>> def task_fn(cfg): - ... out = to_yaml(cfg) # task's input config, converted to yaml-string - ... print(out) - ... return out - - Now, let's use `launch` to run this task function via Hydra, using particular configured - values (or, "overrides") for ``a`` and ``b``. - - >>> job_out = launch(Conf, task_fn, overrides=["a=1", "b='foo'"]) # doctest: +SKIP - a: 1 - b: foo - - Let's inspect ``job_out`` to see the ways that it summarizes the results of this job. - - >>> job_out.return_value # the value returned by `task_fn` # doctest: +SKIP - 'a: 1\nb: foo\n' - - >>> # where the job's outputs, logs, and configs are saved - >>> job_out.working_dir # doctest: +SKIP - 'outputs/2021-10-19/15-27-11' - - >>> job_out.cfg # the particular config used to run our task-function # doctest: +SKIP - {'a': 1, 'b': 'foo'} - - >>> job_out.overrides # the overrides that we provides # doctest: +SKIP - ['a=1', "b='foo'"] - - >>> job_out.status # the job's completion status # doctest: +SKIP - - - **Launching a multirun job** - - We can launch multiple runs of our task-function, using various configured values. - Let's launch a multirun that sweeps over three configurations - - >>> (outputs,) = launch( # doctest: +SKIP - ... Conf, - ... task_fn, - ... overrides=["a=1,2,3", "b='bar'"], - ... multirun=True, - ... 
) - [2021-10-19 17:50:07,334][HYDRA] Launching 3 jobs locally - [2021-10-19 17:50:07,334][HYDRA] #0 : a=1 b='bar' - a: 1 - b: bar - [2021-10-19 17:50:07,434][HYDRA] #1 : a=2 b='bar' - a: 2 - b: bar - [2021-10-19 17:50:07,535][HYDRA] #2 : a=3 b='bar' - a: 3 - b: bar - - ``outputs`` contains three corresponding ``JobReturns`` instances. - - >>> len(outputs) # doctest: +SKIP - 3 - >>> [j.cfg for j in outputs] # doctest: +SKIP - [{'a': 1, 'b': 'bar'}, {'a': 2, 'b': 'bar'}, {'a': 3, 'b': 'bar'}] - - Each run's outputs, logs, and configs are saved to separate working directories - - >>> [j.working_dir for j in outputs] # doctest: +SKIP - ['multirun/2021-10-19/17-50-07\\0', - 'multirun/2021-10-19/17-50-07\\1', - 'multirun/2021-10-19/17-50-07\\2'] - """ - - # used for check below - _num_dataclass_fields = 0 - if is_dataclass(config): - _num_dataclass_fields = len(fields(config)) - - # store config in ConfigStore - if to_dictconfig and is_dataclass(config): - # convert Dataclass to a DictConfig - dictconfig = OmegaConf.create(OmegaConf.to_container(OmegaConf.structured(config))) - config_name = _store_config(dictconfig, config_name) - else: - config_name = _store_config(config, config_name) - - # Initializes Hydra and add the config_path to the config search path - with initialize( - config_path=config_path, - caller_stack_depth=caller_stack_depth, - job_name=job_name, - version_base=version_base, - ): - # taken from hydra.compose with support for MULTIRUN - gh = GlobalHydra.instance() - assert gh.hydra is not None - - # Load configuration - cfg = gh.hydra.compose_config( - config_name=config_name, - overrides=overrides if overrides is not None else [], - run_mode=RunMode.RUN if not multirun else RunMode.MULTIRUN, - from_shell=False, - with_log_configuration=with_log_configuration, - ) - - callbacks = Callbacks(cfg) - run_start = callbacks.on_run_start if not multirun else callbacks.on_multirun_start - run_start(config=cfg, config_name=config_name) - - hydra_context = 
HydraContext(config_loader=gh.config_loader(), callbacks=callbacks) - - if not multirun: - job = run_job( - hydra_context=hydra_context, - task_function=task_function, - config=cfg, - job_dir_key="hydra.run.dir", - job_subdir_key=None, - configure_logging=with_log_configuration, - ) - callbacks.on_run_end(config=cfg, config_name=config_name, job_return=job) - - # access the result to trigger an exception in case the job failed. - _ = job.return_value - else: - # Instantiate sweeper without using Hydra's Plugin discovery (Zen!) - sweeper = instantiate(cfg.hydra.sweeper) - assert isinstance(sweeper, Sweeper) - sweeper.setup( - config=cfg, - hydra_context=hydra_context, - task_function=task_function, - ) - - task_overrides = OmegaConf.to_container(cfg.hydra.overrides.task, resolve=False) - assert isinstance(task_overrides, list) - job = sweeper.sweep(arguments=task_overrides) - callbacks.on_multirun_end(config=cfg, config_name=config_name) - - if is_dataclass(config): - _num_dataclass_fields_after = len(fields(config)) - if ( - _num_dataclass_fields_after == 0 - and _num_dataclass_fields_after < _num_dataclass_fields - ): - warnings.warn( - "Your dataclass-based config was mutated by this run. If you just executed with a " - "`hydra/launcher` that utilizes cloudpickle (e.g., hydra-submitit-launcher), " - "there is a known issue with dataclasses " - "(see: https://github.com/cloudpipe/cloudpickle/issues/386). You will have " - "to restart your interactive environment to run `launch` again. To avoid this " - "issue you can use the `launch` option: `to_dictconfig=True`." - ) - - return job diff --git a/project/datamodules/__init__.py b/project/datamodules/__init__.py index 72563061..10040123 100644 --- a/project/datamodules/__init__.py +++ b/project/datamodules/__init__.py @@ -1,6 +1,11 @@ +"""Datamodules (datasets + preprocessing + dataloading) + +See the :ref:`lightning.LightningDataModule` class for more information. 
+""" + +from .image_classification import ImageClassificationDataModule from .image_classification.cifar10 import CIFAR10DataModule, cifar10_normalization from .image_classification.fashion_mnist import FashionMNISTDataModule -from .image_classification.image_classification import ImageClassificationDataModule from .image_classification.imagenet import ImageNetDataModule from .image_classification.imagenet32 import ImageNet32DataModule, imagenet32_normalization from .image_classification.inaturalist import INaturalistDataModule diff --git a/project/main.py b/project/main.py index dc053068..8910098e 100644 --- a/project/main.py +++ b/project/main.py @@ -1,3 +1,5 @@ +"""Main entry-point.""" + from __future__ import annotations import dataclasses @@ -35,6 +37,7 @@ version_base="1.2", ) def main(dict_config: DictConfig) -> dict: + """Main entry point for training a model.""" print_config(dict_config, resolve=False) config: Config = resolve_dictconfig(dict_config) diff --git a/project/utils/utils.py b/project/utils/utils.py index 0702f0f1..145496cc 100644 --- a/project/utils/utils.py +++ b/project/utils/utils.py @@ -214,10 +214,9 @@ def print_config( TAKEN FROM https://github.com/ashleve/lightning-hydra-template/blob/6a92395ed6afd573fa44dd3a054a603acbdcac06/src/utils/__init__.py#L56 Args: - config (DictConfig): Configuration composed by Hydra. - print_order (Sequence[str], optional): Determines in what order config components are - printed. - resolve (bool, optional): Whether to resolve reference fields of DictConfig. + config: Configuration composed by Hydra. + print_order: Determines in what order config components are printed. + resolve: Whether to resolve reference fields of DictConfig. 
""" style = "dim" diff --git a/pyproject.toml b/pyproject.toml index 6f7025a3..bbaa657b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -29,6 +29,7 @@ dependencies = [ "tensor-regression @ git+https://www.github.com/lebrice/tensor_regression", "simple-parsing>=0.1.5", "pydantic==2.7.4", + "milatools>=0.0.18", ] requires-python = ">=3.12" readme = "README.md" @@ -56,6 +57,14 @@ dev = [ "pytest-cov>=5.0.0", "tensor-regression>=0.0.2.post3.dev0", "pytest-testmon>=2.1.1", + "mkdocs>=1.6.0", + "pymdown-extensions>=10.8.1", + "mkdocstrings[python]>=0.25.1", + "mkdocs-gen-files>=0.5.0", + "black>=24.4.2", + "mkdocs-awesome-pages-plugin>=2.9.2", + "mkdocs-literate-nav>=0.6.1", + "mkdocs-material>=9.5.28", ] [[tool.pdm.source]]