diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
new file mode 100644
index 0000000..b89748c
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/bug_report.md
@@ -0,0 +1,26 @@
+---
+name: Bug report
+about: Create a report to help us improve
+title: ''
+labels: ''
+assignees: ''
+
+---
+
+### Describe the bug
+A clear and concise description of what the bug is.
+
+### To Reproduce
+Code snippet or clear steps to reproduce the behavior.
+
+### Expected behavior
+A clear and concise description of what you expected to happen.
+
+### Screenshots
+If applicable, add screenshots to help explain your problem.
+
+### Version
+ - Version info such as v0.1.5
+
+### Additional context
+Add any other context about the problem here.
diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml
new file mode 100644
index 0000000..3ba13e0
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/config.yml
@@ -0,0 +1 @@
+blank_issues_enabled: false
diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md
new file mode 100644
index 0000000..b2de4ff
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/feature_request.md
@@ -0,0 +1,20 @@
+---
+name: Feature request
+about: Suggest an idea for this project
+title: ''
+labels: ''
+assignees: ''
+
+---
+
+### Is your feature request related to a problem? Please describe.
+A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
+
+### Describe the solution you'd like
+A clear and concise description of what you want to happen.
+
+### Describe alternatives you've considered
+A clear and concise description of any alternative solutions or features you've considered.
+
+### Additional context
+Add any other context or screenshots about the feature request here.
diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md
new file mode 100644
index 0000000..7d639c9
--- /dev/null
+++ b/.github/pull_request_template.md
@@ -0,0 +1,7 @@
+# PR Type ([Feature | Fix | Documentation | Test])
+
+## Short Description
+...
+
+## Tests Added
+...
diff --git a/.github/workflows/code_checks.yml b/.github/workflows/code_checks.yml
new file mode 100644
index 0000000..267469e
--- /dev/null
+++ b/.github/workflows/code_checks.yml
@@ -0,0 +1,41 @@
+name: code checks
+
+on:
+  push:
+    branches:
+      - main
+    paths:
+      - .pre-commit-config.yaml
+      - .github/workflows/code_checks.yml
+      - '**.py'
+      - poetry.lock
+      - pyproject.toml
+      - '**.ipynb'
+  pull_request:
+    branches:
+      - main
+    paths:
+      - .pre-commit-config.yaml
+      - .github/workflows/code_checks.yml
+      - '**.py'
+      - poetry.lock
+      - pyproject.toml
+      - '**.ipynb'
+
+jobs:
+  run-code-check:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v3
+      - name: Install poetry
+        run: python3 -m pip install --upgrade pip && python3 -m pip install poetry
+      - uses: actions/setup-python@v4.7.1
+        with:
+          python-version: '3.10'
+          cache: 'poetry'
+      - name: Install dependencies and check code
+        run: |
+          poetry env use '3.10'
+          source $(poetry env info --path)/bin/activate
+          poetry install --without docs
+          pre-commit run --all-files
diff --git a/.github/workflows/docs_build.yml b/.github/workflows/docs_build.yml
new file mode 100644
index 0000000..4bd2567
--- /dev/null
+++ b/.github/workflows/docs_build.yml
@@ -0,0 +1,48 @@
+name: docs (build)
+
+on:
+  pull_request:
+    branches:
+      - main
+    paths:
+      - .pre-commit-config.yaml
+      - .github/workflows/docs_build.yml
+      - '**.py'
+      - '**.ipynb'
+      - poetry.lock
+      - pyproject.toml
+      - '**.rst'
+      - '**.md'
+
+jobs:
+  build:
+    runs-on: [self-hosted, db]
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          submodules: 'true'
+      - name: Install dependencies, build docs and coverage report
+        run: python3 -m pip install --upgrade pip && python3 -m pip install poetry
+      - uses: actions/setup-python@v4.7.1
+        with:
+          python-version: '3.10'
+          cache: 'poetry'
+      - run: |
+          python3 -m pip install --upgrade pip && python3 -m pip install poetry
+          poetry env use '3.10'
+          source $(poetry env info --path)/bin/activate
+          poetry install
+          # pandoc README.md -f markdown -t rst -s -o docs/source/intro.rst
+          cd docs && rm -rf source/reference/api/_autosummary && make html
+          cd .. && coverage run -m pytest -m "not integration_test" && coverage xml && coverage report -m
+      - name: Upload coverage to Codecov
+        uses: Wandalen/wretry.action@v1.0.36
+        with:
+          action: codecov/codecov-action@v3.1.3
+          with: |
+            token: ${{ secrets.CODECOV_TOKEN }}
+            file: ./coverage.xml
+            name: codecov-umbrella
+            fail_ci_if_error: true
+          attempt_limit: 5
+          attempt_delay: 30000
diff --git a/.github/workflows/docs_deploy.yml b/.github/workflows/docs_deploy.yml
new file mode 100644
index 0000000..2f4c4a2
--- /dev/null
+++ b/.github/workflows/docs_deploy.yml
@@ -0,0 +1,56 @@
+name: docs
+
+on:
+  push:
+    branches:
+      - main
+    paths:
+      - .pre-commit-config.yaml
+      - .github/workflows/code_checks.yml
+      - .github/workflows/docs_build.yml
+      - .github/workflows/docs_deploy.yml
+      - .github/workflows/integration_tests.yml
+      - '**.py'
+      - '**.ipynb'
+      - poetry.lock
+      - pyproject.toml
+      - '**.rst'
+      - '**.md'
+
+jobs:
+  deploy:
+    runs-on: [self-hosted, db]
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          submodules: 'true'
+      - name: Install dependencies, build docs and coverage report
+        run: python3 -m pip install --upgrade pip && python3 -m pip install poetry
+      - uses: actions/setup-python@v4.7.1
+        with:
+          python-version: '3.10'
+          cache: 'poetry'
+      - run: |
+          poetry env use '3.10'
+          source $(poetry env info --path)/bin/activate
+          poetry install
+          # pandoc README.md -f markdown -t rst -s -o docs/source/intro.rst
+          cd docs && rm -rf source/reference/api/_autosummary && make html
+          cd .. && coverage run -m pytest -m "not integration_test" && coverage xml && coverage report -m
+      - name: Upload coverage to Codecov
+        uses: Wandalen/wretry.action@v1.0.36
+        with:
+          action: codecov/codecov-action@v3.1.3
+          with: |
+            token: ${{ secrets.CODECOV_TOKEN }}
+            file: ./coverage.xml
+            name: codecov-umbrella
+            fail_ci_if_error: true
+          attempt_limit: 5
+          attempt_delay: 30000
+      - name: Deploy to GitHub Pages
+        uses: peaceiris/actions-gh-pages@v3
+        with:
+          github_token: ${{ secrets.GITHUB_TOKEN }}
+          publish_branch: github_pages
+          publish_dir: docs/build
diff --git a/.github/workflows/integration_tests.yml b/.github/workflows/integration_tests.yml
new file mode 100644
index 0000000..e4b698a
--- /dev/null
+++ b/.github/workflows/integration_tests.yml
@@ -0,0 +1,61 @@
+name: integration tests
+
+on:
+  push:
+    branches:
+      - main
+    paths:
+      - .pre-commit-config.yaml
+      - .github/workflows/code_checks.yml
+      - .github/workflows/docs_build.yml
+      - .github/workflows/docs_deploy.yml
+      - .github/workflows/integration_tests.yml
+      - '**.py'
+      - '**.ipynb'
+      - poetry.lock
+      - pyproject.toml
+      - '**.rst'
+      - '**.md'
+  pull_request:
+    branches:
+      - main
+    paths:
+      - .pre-commit-config.yaml
+      - .github/workflows/code_checks.yml
+      - .github/workflows/docs_build.yml
+      - .github/workflows/docs_deploy.yml
+      - .github/workflows/integration_tests.yml
+      - '**.py'
+      - '**.ipynb'
+      - poetry.lock
+      - pyproject.toml
+      - '**.rst'
+      - '**.md'
+
+jobs:
+  integration-tests:
+    runs-on: [self-hosted, db]
+    steps:
+      - uses: actions/checkout@v3
+      - name: Install poetry
+        run: pip install poetry
+      - uses: actions/setup-python@v4.7.1
+        with:
+          python-version: '3.10'
+      - name: Install dependencies and check code
+        run: |
+          poetry env use '3.10'
+          source $(poetry env info --path)/bin/activate
+          poetry install --without docs
+          coverage run -m pytest -m integration_test && coverage xml && coverage report -m
+      - name: Upload coverage to Codecov
+        uses: Wandalen/wretry.action@v1.0.36
+        with:
+          action: codecov/codecov-action@v3.1.3
+          with: |
+            token: ${{ secrets.CODECOV_TOKEN }}
+            file: ./coverage.xml
+            name: codecov-umbrella
+            fail_ci_if_error: true
+          attempt_limit: 5
+          attempt_delay: 30000
diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml
new file mode 100644
index 0000000..d169cbb
--- /dev/null
+++ b/.github/workflows/publish.yml
@@ -0,0 +1,27 @@
+name: publish package
+
+on:
+  release:
+    types: [published]
+
+jobs:
+  deploy:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Install apt dependencies
+        run: |
+          sudo apt-get update
+          sudo apt-get install libcurl4-openssl-dev libssl-dev
+      - uses: actions/checkout@v3
+      - name: Install poetry
+        run: python3 -m pip install --upgrade pip && python3 -m pip install poetry
+      - uses: actions/setup-python@v4.7.1
+        with:
+          python-version: '3.10'
+      - name: Build package
+        run: poetry build
+      - name: Publish package
+        uses: pypa/gh-action-pypi-publish@27b31702a0e7fc50959f5ad993c78deac1bdfc29
+        with:
+          user: __token__
+          password: ${{ secrets.PYPI_API_TOKEN }}
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..f657a50
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,29 @@
+*__pycache__*
+*.log
+docs/build
+.python-version
+*.DS_Store
+htmlcov
+.coverage
+venv
+.ipynb_checkpoints
+*.pt
+*.csv
+mlruns
+.profile
+.env
+*.html
+_extract
+*.gzip
+*checkpoint*
+*.parquet
+*.npy
+*.pkl
+*.npz
+*.pickle
+dist/
+outputs/
+multirun/
+_autosummary
+*cyclops_reports*
+*dummy_reports*
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 0000000..c6437a6
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,61 @@
+repos:
+  - repo: https://github.com/pre-commit/pre-commit-hooks
+    rev: v4.4.0  # Use the ref you want to point at
+    hooks:
+      - id: trailing-whitespace
+      - id: check-ast
+      - id: check-builtin-literals
+      - id: check-docstring-first
+      - id: check-executables-have-shebangs
+      - id: debug-statements
+      - id: end-of-file-fixer
+      - id: mixed-line-ending
+        args: [--fix=lf]
+      - id: requirements-txt-fixer
+      - id: check-yaml
+      - id: check-toml
+
+  - repo: https://github.com/psf/black
+    rev: 23.7.0
+    hooks:
+      - id: black
+
+  - repo: https://github.com/charliermarsh/ruff-pre-commit
+    rev: 'v0.1.0'
+    hooks:
+      - id: ruff
+        args: [--fix, --exit-non-zero-on-fix]
+        types_or: [python, jupyter]
+
+  - repo: https://github.com/pre-commit/mirrors-mypy
+    rev: v1.5.1
+    hooks:
+      - id: mypy
+        entry: python3 -m mypy --config-file pyproject.toml
+        language: system
+        types: [python]
+        exclude: "tests"
+
+  - repo: local
+    hooks:
+      - id: nbstripout
+        name: nbstripout
+        language: python
+        entry: nbstripout
+        exclude: ^docs/source/tutorials/gemini.ipynb
+
+  - repo: https://github.com/nbQA-dev/nbQA
+    rev: 1.7.0
+    hooks:
+      - id: nbqa-black
+      - id: nbqa-ruff
+        args: [--fix, --exit-non-zero-on-fix]
+
+  - repo: local
+    hooks:
+      - id: pytest
+        name: pytest
+        entry: python3 -m pytest -m "not integration_test"
+        language: system
+        pass_filenames: false
+        always_run: true
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 0000000..c89648d
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1,29 @@
+# Contributing to cyclops-query
+
+Thanks for your interest in contributing to the cyclops-query tool!
+
+To submit PRs, please fill out the PR template along with the PR. If the PR
+fixes an issue, don't forget to link the PR to the issue!
+
+## Pre-commit hooks
+
+Once the Python virtual environment is set up, you can run the pre-commit hooks using:
+
+```bash
+pre-commit run --all-files
+```
+
+## Coding guidelines
+
+For code style, we recommend the [Google style guide](https://google.github.io/styleguide/pyguide.html).
+
+Pre-commit hooks apply the [black](https://black.readthedocs.io/en/stable/the_black_code_style/current_style.html)
+code formatting.
+
+For docstrings, we use the [numpy format](https://numpydoc.readthedocs.io/en/latest/format.html).
+
+We use [ruff](https://github.com/astral-sh/ruff) for further static code analysis.
+The pre-commit hooks show errors which you need to fix before submitting a PR.
+
+Last but not least, we use type hints in our code, which are then checked using
+[mypy](https://mypy.readthedocs.io/en/stable/).
diff --git a/LICENSE.md b/LICENSE.md
new file mode 100644
index 0000000..ab87573
--- /dev/null
+++ b/LICENSE.md
@@ -0,0 +1,201 @@
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!) The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright 2022, Vector Institute
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/README.md b/README.md
index d695ad3..d41c9e5 100644
--- a/README.md
+++ b/README.md
@@ -1,2 +1,87 @@
 # cyclops-query
-A tool to query EHR databases
+--------------------------------------------------------------------------------
+
+[![PyPI](https://img.shields.io/pypi/v/cycquery)](https://pypi.org/project/cycquery)
+[![code checks](https://github.com/VectorInstitute/cyclops-query/actions/workflows/code_checks.yml/badge.svg)](https://github.com/VectorInstitute/cyclops-query/actions/workflows/code_checks.yml)
+[![integration tests](https://github.com/VectorInstitute/cyclops-query/actions/workflows/integration_tests.yml/badge.svg)](https://github.com/VectorInstitute/cyclops-query/actions/workflows/integration_tests.yml)
+[![docs](https://github.com/VectorInstitute/cyclops-query/actions/workflows/docs_deploy.yml/badge.svg)](https://github.com/VectorInstitute/cyclops-query/actions/workflows/docs_deploy.yml)
+[![codecov](https://codecov.io/gh/VectorInstitute/cyclops-query/branch/main/graph/badge.svg)](https://codecov.io/gh/VectorInstitute/cyclops-query)
+[![license](https://img.shields.io/github/license/VectorInstitute/cyclops-query.svg)](https://github.com/VectorInstitute/cyclops-query/blob/main/LICENSE)
+
+``cyclops-query`` is a tool for querying EHR databases.
+
+## 🐣 Getting Started
+
+### Installing cyclops-query using pip
+
+```bash
+python3 -m pip install cycquery
+```
+
+## 🧑🏿‍💻 Developing
+
+### Using poetry
+
+The development environment can be set up using
+[poetry](https://python-poetry.org/docs/#installation). Make sure it is
+installed, then run:
+
+```bash
+python3 -m poetry install
+source $(poetry env info --path)/bin/activate
+```
+
+API documentation is built using [Sphinx](https://www.sphinx-doc.org/en/master/) and
+can be built locally by running:
+
+```bash
+cd docs
+make html SPHINXOPTS="-D nbsphinx_allow_errors=True"
+```
+
+### Contributing
+
+Contributions to ``cyclops-query`` are welcome.
+See [Contributing](https://vectorinstitute.github.io/cyclops-query/api/contributing.html) for
+guidelines.
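+
+## 🚀 Example
+
+A minimal usage sketch (the connection details below are placeholders; adjust
+them for your own database):
+
+```python
+from cycquery import MIMICIVQuerier
+
+querier = MIMICIVQuerier(
+    dbms="postgresql",
+    host="localhost",
+    port=5432,
+    database="mimiciv",
+    user="username",
+    password="password",
+)
+patients = querier.patients()  # returns a QueryInterface
+patients_df = patients.run(limit=100)  # fetch the first 100 rows as a DataFrame
+```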
+
+
+## 📚 [Documentation](https://vectorinstitute.github.io/cyclops-query/)
+
+
+## 🎓 Citation
+
+Reference to cite when you use ``cyclops-query`` in a project or a research paper:
+
+```
+@article {Krishnan2022.12.02.22283021,
+	author = {Krishnan, Amrit and Subasri, Vallijah and McKeen, Kaden and Kore, Ali and Ogidi, Franklin and Alinoori, Mahshid and Lalani, Nadim and Dhalla, Azra and Verma, Amol and Razak, Fahad and Pandya, Deval and Dolatabadi, Elham},
+	title = {CyclOps: Cyclical development towards operationalizing ML models for health},
+	elocation-id = {2022.12.02.22283021},
+	year = {2022},
+	doi = {10.1101/2022.12.02.22283021},
+	publisher = {Cold Spring Harbor Laboratory Press},
+	URL = {https://www.medrxiv.org/content/early/2022/12/08/2022.12.02.22283021},
+	journal = {medRxiv}
+}
+```
diff --git a/codecov.yml b/codecov.yml
new file mode 100644
index 0000000..72f5c36
--- /dev/null
+++ b/codecov.yml
@@ -0,0 +1,19 @@
+codecov:
+  require_ci_to_pass: true
+  notify:
+    after_n_builds: 2
+    wait_for_ci: yes
+comment:
+  behavior: default
+  layout: reach,diff,flags,tree,reach
+  show_carryforward_flags: false
+  require_changes: true
+coverage:
+  status:
+    changes: true
+    default_rules:
+      flag_coverage_not_uploaded_behavior: include
+    patch: true
+    project: true
+github_checks:
+  annotations: true
diff --git a/cycquery/__init__.py b/cycquery/__init__.py
new file mode 100644
index 0000000..40b6d91
--- /dev/null
+++ b/cycquery/__init__.py
@@ -0,0 +1,16 @@
+"""The ``query`` API provides classes to query EHR databases."""
+
+try:
+    import sqlalchemy
+except ImportError:
+    raise ImportError(
+        "CyclOps is not installed with query API support! Please install using 'pip install cyclops[query]'.",  # noqa: E501
+    ) from None
+
+
+from cycquery.base import DatasetQuerier
+from cycquery.eicu import EICUQuerier
+from cycquery.gemini import GEMINIQuerier
+from cycquery.mimiciii import MIMICIIIQuerier
+from cycquery.mimiciv import MIMICIVQuerier
+from cycquery.omop import OMOPQuerier
diff --git a/cycquery/base.py b/cycquery/base.py
new file mode 100644
index 0000000..dfedee3
--- /dev/null
+++ b/cycquery/base.py
@@ -0,0 +1,290 @@
+"""Base querier class."""
+
+import logging
+from functools import partial
+from typing import Any, Callable, Dict, List, Optional
+
+from sqlalchemy import MetaData
+from sqlalchemy.sql.selectable import Subquery
+
+from cycquery import ops as qo
+from cycquery.interface import QueryInterface
+from cycquery.orm import Database, DatasetQuerierConfig
+from cycquery.util import (
+    DBSchema,
+    _to_subquery,
+    get_attr_name,
+)
+from cycquery.utils.log import setup_logging
+
+
+# Logging.
+LOGGER = logging.getLogger(__name__)
+setup_logging(print_level="INFO", logger=LOGGER)
+
+
+def _create_get_table_lambdafn(schema_name: str, table_name: str) -> Callable[..., Any]:
+    """Create a lambda function to access a table.
+
+    Parameters
+    ----------
+    schema_name
+        The schema name.
+    table_name
+        The table name.
+
+    Returns
+    -------
+    Callable
+        The lambda function.
+
+    """
+    return lambda db: getattr(getattr(db, schema_name), table_name)
+
+
+def _cast_timestamp_cols(table: Subquery) -> Subquery:
+    """Cast timestamp columns to datetime.
+
+    Parameters
+    ----------
+    table
+        Table to cast.
+
+    Returns
+    -------
+    sqlalchemy.sql.selectable.Subquery
+        Table with cast columns.
+ + """ + cols_to_cast = [] + for col in table.columns: + if str(col.type) == "TIMESTAMP": + cols_to_cast.append(col.name) + if cols_to_cast: + table = qo.Cast(cols_to_cast, "timestamp")(table) + + return table + + +class DatasetQuerier: + """Base class to query EHR datasets. + + Attributes + ---------- + db + ORM Database used to run queries. + + Parameters + ---------- + database + Name of database. + user + Username for database. + password + Password for database. + dbms + Database management system. + host + Hostname of database. + port + Port of database. + + Notes + ----- + This class is intended to be subclassed to provide methods for querying tables in + the database. This class automatically creates methods for querying tables in the + database. The methods are named after the schema and table name, i.e. + `self.schema_name.table_name()`. The methods are created when the class is + instantiated. The subclass can provide custom methods for querying tables in the + database which can build on the methods created by this class. + + """ + + def __init__( + self, + database: str, + user: str, + password: str, + dbms: str = "postgresql", + host: str = "localhost", + port: int = 5432, + ) -> None: + config = DatasetQuerierConfig( + database=database, + user=user, + password=password, + dbms=dbms, + host=host, + port=port, + ) + self.db = Database(config) + if not self.db.is_connected: + LOGGER.error("Database is not connected, cannot run queries.") + return + self._setup_table_methods() + + def list_schemas(self) -> List[str]: + """List schemas in the database to query. + + Returns + ------- + List[str] + List of schema names. + + """ + return list(self.db.inspector.get_schema_names()) + + def list_tables(self, schema_name: Optional[str] = None) -> List[str]: + """List table methods that can be queried using the database. + + Parameters + ---------- + schema_name + Name of schema in the database. + + Returns + ------- + List[str] + List of table names. + + """ + if schema_name: + table_names = [] + for table in self.db.list_tables(): + schema_name_, _ = table.split(".") + if schema_name_ == schema_name: + table_names.append(table) + else: + table_names = self.db.list_tables() + + return table_names + + def list_columns(self, schema_name: str, table_name: str) -> List[str]: + """List columns in a table. + + Parameters + ---------- + schema_name + Name of schema in the database. + table_name + Name of table in the database. + + Returns + ------- + List[str] + List of column names. + + """ + return list( + getattr(getattr(self.db, schema_name), table_name).data.columns.keys(), + ) + + def list_custom_tables(self) -> List[str]: + """List custom tables methods provided by the dataset API. + + Returns + ------- + List[str] + List of custom table names. + + """ + method_list = dir(self) + custom_tables = [] + for method in method_list: + if ( + not method.startswith( + "__", + ) + and not method.startswith("_") + and method not in self.list_schemas() + and not method.startswith("list_") + and not method.startswith("get_") + and method not in ["db"] + ): + custom_tables.append(method) + + return custom_tables + + def get_table( + self, + schema_name: str, + table_name: str, + cast_timestamp_cols: bool = True, + ) -> Subquery: + """Get a table and possibly map columns to have standard names. + + Standardizing column names allows for columns to be + recognized in downstream processing. + + Parameters + ---------- + schema_name + Name of schema in the database. 
+        table_name
+            Name of table in the database.
+        cast_timestamp_cols
+            Whether to cast timestamp columns to datetime.
+
+        Returns
+        -------
+        sqlalchemy.sql.selectable.Subquery
+            Table with mapped columns.
+
+        """
+        table = _create_get_table_lambdafn(schema_name, table_name)(self.db).data
+
+        if cast_timestamp_cols:
+            table = _cast_timestamp_cols(table)
+
+        return _to_subquery(table)
+
+    def _template_table_method(
+        self,
+        schema_name: str,
+        table_name: str,
+    ) -> QueryInterface:
+        """Template method for table methods.
+
+        Parameters
+        ----------
+        schema_name
+            Name of schema in the database.
+        table_name
+            Name of table in the database.
+
+        Returns
+        -------
+        cycquery.interface.QueryInterface
+            A query interface object.
+
+        """
+        table = getattr(getattr(self.db, schema_name), table_name).data
+        table = _to_subquery(table)
+
+        return QueryInterface(self.db, table)
+
+    def _setup_table_methods(self) -> None:
+        """Add table methods.
+
+        This method adds methods to the querier class that allow querying of tables in
+        the database. The methods are named after the table names.
+
+        """
+        schemas = self.list_schemas()
+        meta: Dict[str, MetaData] = {}
+        for schema_name in schemas:
+            metadata = MetaData(schema=schema_name)
+            metadata.reflect(bind=self.db.engine)
+            meta[schema_name] = metadata
+            schema = DBSchema(schema_name, meta[schema_name])
+            for table_name in meta[schema_name].tables:
+                setattr(
+                    schema,
+                    get_attr_name(table_name),
+                    partial(
+                        self._template_table_method,
+                        schema_name=schema_name,
+                        table_name=get_attr_name(table_name),
+                    ),
+                )
+            setattr(self, schema_name, schema)
diff --git a/cycquery/eicu.py b/cycquery/eicu.py
new file mode 100644
index 0000000..591a7d5
--- /dev/null
+++ b/cycquery/eicu.py
@@ -0,0 +1,19 @@
+"""EICU-CRD query module.
+
+Supports querying of eICU.
+
+"""
+
+import logging
+
+from cycquery.base import DatasetQuerier
+from cycquery.utils.log import setup_logging
+
+
+# Logging.
+LOGGER = logging.getLogger(__name__)
+setup_logging(print_level="INFO", logger=LOGGER)
+
+
+class EICUQuerier(DatasetQuerier):
+    """EICU dataset querier."""
diff --git a/cycquery/gemini.py b/cycquery/gemini.py
new file mode 100644
index 0000000..5cb7617
--- /dev/null
+++ b/cycquery/gemini.py
@@ -0,0 +1,221 @@
+"""GEMINI query module."""
+
+import logging
+
+from sqlalchemy import select
+from sqlalchemy.sql.expression import union_all
+
+import cycquery.ops as qo
+from cycquery.base import DatasetQuerier
+from cycquery.interface import QueryInterface
+from cycquery.utils.log import setup_logging
+
+
+# Logging.
+LOGGER = logging.getLogger(__name__)
+setup_logging(print_level="INFO", logger=LOGGER)
+
+
+# Custom column names.
+CARE_UNIT = "care_unit"
+
+
+class GEMINIQuerier(DatasetQuerier):
+    """GEMINI dataset querier."""
+
+    def ip_admin(
+        self,
+    ) -> QueryInterface:
+        """Query GEMINI patient encounters.
+
+        Returns
+        -------
+        cycquery.interface.QueryInterface
+            Constructed query, wrapped in an interface object.
+ + """ + table = self.get_table("public", "ip_administrative") + + # Possibly cast string representations to timestamps + table = qo.Cast(["admit_date_time", "discharge_date_time"], "timestamp")(table) + + # Get the discharge disposition code descriptions + lookup_table = self.get_table("public", "lookup_ip_administrative") + lookup_table = qo.ConditionEquals("variable", "discharge_disposition")( + lookup_table, + ) + table = qo.Join( + lookup_table, + on=("discharge_disposition", "value"), + on_to_type="int", + join_table_cols="description", + isouter=True, + )(table) + table = qo.Rename({"description": "discharge_description"})(table) + table = qo.Drop("value")(table) + + return QueryInterface(self.db, table) + + def diagnoses( + self, + ) -> QueryInterface: + """Query diagnosis data. + + Returns + ------- + cycquery.interface.QueryInterface + Constructed table, wrapped in an interface object. + + """ + table = self.get_table("public", "diagnosis") + + lookup_table = self.get_table("public", "lookup_diagnosis") + lookup_table = qo.ConditionEquals("variable", "diagnosis_type")(lookup_table) + table = qo.Join( + lookup_table, + on=("diagnosis_type", "value"), + join_table_cols="description", + isouter=True, + )(table) + table = qo.Drop("value")(table) + table = qo.Rename({"description": "diagnosis_type_description"})(table) + table = qo.ReorderAfter("diagnosis_type_description", "diagnosis_type")(table) + + # Trim whitespace from ICD codes. + table = qo.Trim("diagnosis_code")(table) + + return QueryInterface(self.db, table) + + def room_transfer( + self, + ) -> QueryInterface: + """Query room transfer data. + + Returns + ------- + cycquery.interface.QueryInterface + Constructed table, wrapped in an interface object. + + """ + table = self.get_table("public", "room_transfer") + + # Join with lookup to get transfer description. + lookup_table = self.get_table("public", "lookup_room_transfer") + lookup_table = qo.ConditionEquals("variable", "medical_service")(lookup_table) + + table = qo.Join( + lookup_table, + on=("medical_service", "value"), + join_table_cols="description", + isouter=True, + )(table) + table = qo.Rename({"description": "transfer_description"})(table) + + return QueryInterface(self.db, table) + + def care_units( + self, + ) -> QueryInterface: + """Query care unit data, fetches transfer info from multiple tables. + + Returns + ------- + cycquery.interface.QueryInterface + Constructed table, wrapped in an interface object. + + """ + filter_care_unit_cols = qo.Keep( + [ + "genc_id", + "admit", + "discharge", + CARE_UNIT, + ], + ) + + # In-patient table. + ip_table = self.get_table("public", "ip_administrative") + ip_table = qo.Rename( + { + "admit_date_time": "admit", + "discharge_date_time": "discharge", + }, + )(ip_table) + ip_table = qo.Literal("IP", CARE_UNIT)(ip_table) + ip_table = filter_care_unit_cols(ip_table) + + # Special care unit table. + scu_table = self.get_table("public", "ip_scu") + scu_table = qo.Rename( + { + "scu_admit_date_time": "admit", + "scu_discharge_date_time": "discharge", + }, + )(scu_table) + scu_table = qo.Literal("SCU", CARE_UNIT)(scu_table) + scu_table = filter_care_unit_cols(scu_table) + + # Emergency room/department table. + er_table = self.get_table("public", "er_administrative") + er_table = qo.Rename( + { + "er_admit_timestamp": "admit", + "er_discharge_timestamp": "discharge", + }, + )(er_table) + er_table = qo.Literal("ER", CARE_UNIT)(er_table) + er_table = filter_care_unit_cols(er_table) + + # Room transfer table. 
+        rt_table = self.get_table("public", "room_transfer")
+        rt_table = qo.Rename(
+            {
+                "checkin_date_time": "admit",
+                "checkout_date_time": "discharge",
+            },
+        )(rt_table)
+        rt_table = qo.Rename({"transfer_description": CARE_UNIT})(rt_table)
+        rt_table = filter_care_unit_cols(rt_table)
+
+        # Combine.
+        table = union_all(
+            select(er_table),
+            select(scu_table),
+            select(ip_table),
+            select(rt_table),
+        ).subquery()
+
+        return QueryInterface(self.db, table)
+
+    def imaging(
+        self,
+    ) -> QueryInterface:
+        """Query imaging reports data.
+
+        Returns
+        -------
+        cycquery.interface.QueryInterface
+            Constructed table, wrapped in an interface object.
+
+        """
+        table = self.get_table("public", "imaging")
+
+        # Get imaging test description
+        lookup_table = self.get_table("public", "lookup_imaging")
+        lookup_table = qo.ConditionEquals("variable", "imaging_test_name_mapped")(
+            lookup_table,
+        )
+
+        table = qo.Join(
+            lookup_table,
+            on=("imaging_test_name_mapped", "value"),
+            on_to_type="str",
+            join_table_cols="description",
+        )(table)
+        table = qo.Drop("value")(table)
+        table = qo.Rename({"description": "imaging_test_description"})(table)
+        table = qo.ReorderAfter("imaging_test_description", "imaging_test_name_mapped")(
+            table,
+        )
+
+        return QueryInterface(self.db, table)
diff --git a/cycquery/interface.py b/cycquery/interface.py
new file mode 100644
index 0000000..5df859e
--- /dev/null
+++ b/cycquery/interface.py
@@ -0,0 +1,253 @@
+"""A query interface class to wrap database objects and queries."""
+
+import logging
+from typing import List, Literal, Optional, Tuple, Union
+
+import dask.dataframe as dd
+import pandas as pd
+from sqlalchemy.sql.elements import BinaryExpression
+
+import cycquery.ops as qo
+from cycquery.orm import Database
+from cycquery.util import TableTypes
+from cycquery.utils.common import to_list_optional
+from cycquery.utils.file import save_dataframe
+from cycquery.utils.log import setup_logging
+
+
+# Logging.
+LOGGER = logging.getLogger(__name__)
+setup_logging(print_level="INFO", logger=LOGGER)
+
+
+class QueryInterface:
+    """An interface class to wrap queries and run them.
+
+    Parameters
+    ----------
+    database
+        Database object to create ORM, and query data.
+    query
+        The query.
+
+    """
+
+    def __init__(
+        self,
+        database: Database,
+        query: Union[TableTypes, "QueryInterface"],
+    ) -> None:
+        """Initialize the QueryInterface object, join and chain operations."""
+        self.database = database
+        if isinstance(query, QueryInterface):
+            self.query = query.query  # type: ignore
+        else:
+            self.query = query
+        self._data = None
+
+    @property
+    def data(self) -> Optional[Union[pd.DataFrame, dd.core.DataFrame]]:
+        """Get data."""
+        return self._data
+
+    def join(
+        self,
+        join_table: Union[TableTypes, "QueryInterface"],
+        on: Optional[
+            Union[
+                str,
+                List[str],
+                Tuple[str],
+                List[Tuple[str, str]],
+            ]
+        ] = None,
+        on_to_type: Optional[Union[type, List[type]]] = None,
+        cond: Optional[BinaryExpression] = None,
+        table_cols: Optional[Union[str, List[str]]] = None,
+        join_table_cols: Optional[Union[str, List[str]]] = None,
+        isouter: Optional[bool] = False,
+    ) -> "QueryInterface":
+        """Join the query with another table.
+
+        Parameters
+        ----------
+        join_table
+            Table to join with.
+        on
+            Column(s) to join on.
+        on_to_type
+            Type(s) to cast the column(s) to join on.
+        cond
+            Condition to join on.
+        table_cols
+            Columns to select from the original table.
+        join_table_cols
+            Columns to select from the joined table.
+        isouter
+            Whether to perform an outer join.
+
+        Returns
+        -------
+        QueryInterface
+            QueryInterface object with the join operation added.
+
+        """
+        on = to_list_optional(on)
+        on_to_type = to_list_optional(on_to_type)
+        table_cols = to_list_optional(table_cols)
+        join_table_cols = to_list_optional(join_table_cols)
+        if isinstance(join_table, QueryInterface):
+            join_table = join_table.query
+        query = qo.Join(
+            join_table=join_table,
+            on=on,
+            on_to_type=on_to_type,
+            cond=cond,
+            table_cols=table_cols,
+            join_table_cols=join_table_cols,
+            isouter=isouter,
+        )(self.query)
+
+        return QueryInterface(self.database, query)
+
+    def ops(
+        self,
+        ops: Union[qo.QueryOp, qo.Sequential],
+    ) -> "QueryInterface":
+        """Chain operations with the query.
+
+        Parameters
+        ----------
+        ops
+            Operations to perform on the query.
+
+        Returns
+        -------
+        QueryInterface
+            QueryInterface object with the operations added.
+
+        """
+        query = ops(self.query)
+
+        return QueryInterface(self.database, query)
+
+    def union(
+        self,
+        other: "QueryInterface",
+    ) -> "QueryInterface":
+        """Union the query with another query.
+
+        Parameters
+        ----------
+        other
+            The other query to union with.
+
+        Returns
+        -------
+        QueryInterface
+            QueryInterface object with the union operation added.
+
+        """
+        query = qo.Union(other.query)(self.query)
+
+        return QueryInterface(self.database, query)
+
+    def union_all(
+        self,
+        other: "QueryInterface",
+    ) -> "QueryInterface":
+        """Union all the query with another query.
+
+        Parameters
+        ----------
+        other
+            The other query to union all with.
+
+        Returns
+        -------
+        QueryInterface
+            QueryInterface object with the union all operation added.
+
+        """
+        query = qo.Union(other.query, union_all=True)(self.query)
+
+        return QueryInterface(self.database, query)
+
+    def run(
+        self,
+        limit: Optional[int] = None,
+        backend: Literal["pandas", "dask", "datasets"] = "pandas",
+        index_col: Optional[str] = None,
+        n_partitions: Optional[int] = None,
+    ) -> Union[pd.DataFrame, dd.core.DataFrame]:
+        """Run the query, and fetch data.
+
+        Parameters
+        ----------
+        limit
+            Number of rows to limit the query return.
+        backend
+            Backend computing framework to use, pandas or dask or datasets.
+        index_col
+            Column which becomes the index, and defines the partitioning.
+            Should be an indexed column in the SQL server, of any orderable type.
+        n_partitions
+            Number of partitions. Check dask documentation for additional details.
+
+        Returns
+        -------
+        pandas.DataFrame or dask.DataFrame or datasets.Dataset
+            Query result.
+
+        """
+        self._data = self.database.run_query(
+            self.query,
+            limit=limit,
+            backend=backend,
+            index_col=index_col,
+            n_partitions=n_partitions,
+        )
+
+        return self._data
+
+    def save(
+        self,
+        path: str,
+        file_format: Literal["parquet", "csv"] = "parquet",
+    ) -> str:
+        """Save the query.
+
+        Parameters
+        ----------
+        path
+            Path where the file will be saved.
+        file_format
+            File format of the file to save.
+
+        Returns
+        -------
+        str
+            Processed save path for upstream use.
+
+        """
+        # If the query was already run.
+        if self._data is not None:
+            return save_dataframe(self._data, path, file_format=file_format)
+
+        # Save without running.
+        if file_format == "csv":
+            path = self.database.save_query_to_csv(self.query, path)
+        elif file_format == "parquet":
+            path = self.database.save_query_to_parquet(self.query, path)
+        else:
+            raise ValueError("Invalid file format specified.")
+
+        return path
+
+    def clear_data(self) -> None:
+        """Clear data container.
+
+        Sets the data attribute to None, thus clearing the dataframe contained.
+ + """ + self._data = None diff --git a/cycquery/mimiciii.py b/cycquery/mimiciii.py new file mode 100644 index 0000000..c6f0654 --- /dev/null +++ b/cycquery/mimiciii.py @@ -0,0 +1,87 @@ +"""MIMIC-III query module. + +Supports querying of MIMIC-III. + +""" + +import logging + +import cycquery.ops as qo +from cycquery.base import DatasetQuerier +from cycquery.interface import QueryInterface +from cycquery.utils.log import setup_logging + + +# Logging. +LOGGER = logging.getLogger(__name__) +setup_logging(print_level="INFO", logger=LOGGER) + + +class MIMICIIIQuerier(DatasetQuerier): + """MIMIC-III dataset querier.""" + + def diagnoses( + self, + ) -> QueryInterface: + """Query MIMICIII diagnosis data. + + Returns + ------- + cycquery.interface.QueryInterface + Constructed query, wrapped in an interface object. + + """ + table = self.get_table("mimiciii", "diagnoses_icd") + + # Join with diagnoses dimension table. + table = qo.Join( + join_table=self.get_table("mimiciii", "d_icd_diagnoses"), + on=["icd9_code"], + on_to_type=["str"], + )(table) + + return QueryInterface(self.db, table) + + def labevents( + self, + ) -> QueryInterface: + """Query MIMICIII labevents data. + + Returns + ------- + cycquery.interface.QueryInterface + Constructed query, wrapped in an interface object. + + """ + table = self.get_table("mimiciii", "labevents") + + # Join with lab dimension table. + table = qo.Join( + join_table=self.get_table("mimiciii", "d_labitems"), + on=["itemid"], + on_to_type=["str"], + )(table) + + return QueryInterface(self.db, table) + + def chartevents( + self, + ) -> QueryInterface: + """Query MIMICIII chartevents data. + + Returns + ------- + cycquery.interface.QueryInterface + Constructed query, wrapped in an interface object. + + """ + table = self.get_table("mimiciii", "chartevents") + + # Join with dimension table. + table = qo.Join( + join_table=self.get_table("mimiciii", "d_items"), + on=["itemid"], + on_to_type=["str"], + )(table) + + return QueryInterface(self.db, table) diff --git a/cycquery/mimiciv.py b/cycquery/mimiciv.py new file mode 100644 index 0000000..25f6da7 --- /dev/null +++ b/cycquery/mimiciv.py @@ -0,0 +1,161 @@ +"""MIMIC-IV query module. + +Supports querying of MIMICIV-2.0. + +""" + +import logging + +from sqlalchemy import Integer, func, select + +import cycquery.ops as qo +from cycquery.base import DatasetQuerier +from cycquery.interface import QueryInterface +from cycquery.util import get_column +from cycquery.utils.log import setup_logging + + +# Logging. +LOGGER = logging.getLogger(__name__) +setup_logging(print_level="INFO", logger=LOGGER) + + +class MIMICIVQuerier(DatasetQuerier): + """MIMICIV dataset querier.""" + + def patients( + self, + ) -> QueryInterface: + """Query MIMIC patient data. + + Returns + ------- + cycquery.interface.QueryInterface + Constructed query, wrapped in an interface object. + + Notes + ----- + The function infers the approximate year a patient received care, using the + `anchor_year` and `anchor_year_group` columns. The `join` and `ops` supplied + are applied after the approximate year is inferred. `dod` is + adjusted based on the inferred approximate year of care. + + """ + table = self.get_table("mimiciv_hosp", "patients") + + # Process and include patient's anchor year. 
+        table = select(
+            table,
+            (
+                func.substr(get_column(table, "anchor_year_group"), 1, 4).cast(Integer)
+            ).label("anchor_year_group_start"),
+            (
+                func.substr(get_column(table, "anchor_year_group"), 8, 12).cast(Integer)
+            ).label("anchor_year_group_end"),
+        ).subquery()
+
+        # Select the middle of the anchor year group as the anchor year
+        table = select(
+            table,
+            (
+                get_column(table, "anchor_year_group_start")
+                + (
+                    get_column(table, "anchor_year_group_end")
+                    - get_column(table, "anchor_year_group_start")
+                )
+                / 2
+            ).label("anchor_year_group_middle"),
+        ).subquery()
+
+        table = select(
+            table,
+            (
+                get_column(table, "anchor_year_group_middle")
+                - get_column(table, "anchor_year")
+            ).label("anchor_year_difference"),
+        ).subquery()
+
+        # Shift relevant columns by anchor year difference
+        table = qo.AddDeltaColumn("dod", years="anchor_year_difference")(table)
+        table = qo.Drop(
+            [
+                "anchor_year_group_start",
+                "anchor_year_group_end",
+                "anchor_year_group_middle",
+            ],
+        )(table)
+
+        return QueryInterface(self.db, table)
+
+    def diagnoses(
+        self,
+    ) -> QueryInterface:
+        """Query MIMIC diagnosis data.
+
+        Returns
+        -------
+        cycquery.interface.QueryInterface
+            Constructed query, wrapped in an interface object.
+
+        """
+        table = self.get_table("mimiciv_hosp", "diagnoses_icd")
+
+        # Join with diagnoses dimension table.
+        table = qo.Join(
+            join_table=self.get_table("mimiciv_hosp", "d_icd_diagnoses"),
+            on=["icd_code", "icd_version"],
+            on_to_type=["str", "int"],
+        )(table)
+
+        return QueryInterface(self.db, table)
+
+    def labevents(
+        self,
+    ) -> QueryInterface:
+        """Query lab events from the hospital module.
+
+        Returns
+        -------
+        cycquery.interface.QueryInterface
+            Constructed query, wrapped in an interface object.
+
+        """
+        table = self.get_table("mimiciv_hosp", "labevents")
+        dim_items_table = self.get_table("mimiciv_hosp", "d_labitems")
+
+        # Join with lab items dimension table.
+        table = qo.Join(
+            join_table=dim_items_table,
+            on=["itemid"],
+        )(table)
+
+        return QueryInterface(self.db, table)
+
+    def chartevents(
+        self,
+    ) -> QueryInterface:
+        """Query ICU chart events from the ICU module.
+
+        Returns
+        -------
+        cycquery.interface.QueryInterface
+            Constructed table, wrapped in an interface object.
+
+        """
+        table = self.get_table("mimiciv_icu", "chartevents")
+        dim_items_table = self.get_table("mimiciv_icu", "d_items")
+
+        # Join with items dimension table.
+        table = qo.Join(
+            dim_items_table,
+            on="itemid",
+        )(table)
+
+        return QueryInterface(self.db, table)
diff --git a/cycquery/omop.py b/cycquery/omop.py
new file mode 100644
index 0000000..9f3c1bd
--- /dev/null
+++ b/cycquery/omop.py
@@ -0,0 +1,258 @@
+"""OMOP query API."""
+
+import logging
+from typing import List, Optional, Union
+
+from sqlalchemy.sql.selectable import Subquery
+
+import cycquery.ops as qo
+from cycquery.base import DatasetQuerier
+from cycquery.interface import QueryInterface
+from cycquery.utils.common import to_list
+from cycquery.utils.log import setup_logging
+
+
+# Logging.
+LOGGER = logging.getLogger(__name__)
+setup_logging(print_level="INFO", logger=LOGGER)
+
+
+# OMOP column names.
+VISIT_OCCURRENCE_ID = "visit_occurrence_id"
+PERSON_ID = "person_id"
+VISIT_START_DATETIME = "visit_start_datetime"
+VISIT_END_DATETIME = "visit_end_datetime"
+VISIT_DETAIL_START_DATETIME = "visit_detail_start_datetime"
+VISIT_DETAIL_END_DATETIME = "visit_detail_end_datetime"
+VISIT_CONCEPT_ID = "visit_concept_id"
+VISIT_TYPE_CONCEPT_ID = "visit_type_concept_id"
+VISIT_DETAIL_CONCEPT_ID = "visit_detail_concept_id"
+VISIT_DETAIL_TYPE_CONCEPT_ID = "visit_detail_type_concept_id"
+CARE_SITE_ID = "care_site_id"
+CONCEPT_NAME = "concept_name"
+CONCEPT_ID = "concept_id"
+CARE_SITE_SOURCE_VALUE = "care_site_source_value"
+OBSERVATION_CONCEPT_ID = "observation_concept_id"
+OBSERVATION_TYPE_CONCEPT_ID = "observation_type_concept_id"
+OBSERVATION_DATETIME = "observation_datetime"
+MEASUREMENT_CONCEPT_ID = "measurement_concept_id"
+MEASUREMENT_TYPE_CONCEPT_ID = "measurement_type_concept_id"
+MEASUREMENT_DATETIME = "measurement_datetime"
+UNIT_CONCEPT_ID = "unit_concept_id"
+VALUE_AS_CONCEPT_ID = "value_as_concept_id"
+
+# Created columns.
+VISIT_DETAIL_CONCEPT_NAME = "visit_detail_concept_name"
+CARE_SITE_NAME = "care_site_name"
+GENDER_CONCEPT_NAME = "gender_concept_name"
+RACE_CONCEPT_NAME = "race_concept_name"
+ETHNICITY_CONCEPT_NAME = "ethnicity_concept_name"
+
+# Other constants.
+ID = "id"
+NAME = "name"
+
+
+class OMOPQuerier(DatasetQuerier):
+    """OMOP querier."""
+
+    def __init__(
+        self,
+        database: str,
+        user: str,
+        password: str,
+        dbms: str = "postgresql",
+        host: str = "localhost",
+        port: int = 5432,
+        schema_name: str = "omop",
+    ) -> None:
+        super().__init__(
+            database=database,
+            user=user,
+            password=password,
+            dbms=dbms,
+            host=host,
+            port=port,
+        )
+        self.schema_name = schema_name
+
+    def map_concept_ids_to_name(
+        self,
+        src_table: Union[Subquery, QueryInterface],
+        src_cols: Union[str, List[str]],
+        dst_cols: Optional[Union[str, List[str]]] = None,
+    ) -> QueryInterface:
+        """Map concept IDs in a source table to concept names from concept table.
+
+        For each concept ID column with a name like `somecol_concept_id`, the mapped
+        concept name column will be named `somecol_concept_name`. If `dst_cols` is
+        specified, the mapped concept name column will be named according to the
+        corresponding name in `dst_cols`.
+
+        Parameters
+        ----------
+        src_table
+            Source table with concept IDs.
+        src_cols
+            Column name(s) to consider as concept IDs for mapping.
+        dst_cols
+            Column name(s) to assign for the mapped concept name columns.
+
+        Returns
+        -------
+        cycquery.interface.QueryInterface
+            Query with mapped columns from concept table.
+
+        """
+        if isinstance(src_table, QueryInterface):
+            src_table = src_table.query
+        concept_table = self.get_table(self.schema_name, "concept")
+        src_cols = to_list(src_cols)
+        if dst_cols:
+            dst_cols = to_list(dst_cols)
+            if len(dst_cols) != len(src_cols):
+                raise ValueError("dst_cols must be same length as src_cols")
+
+        for i, col in enumerate(src_cols):
+            if ID not in col:
+                raise ValueError("Specified column not a concept ID column!")
+            src_table = qo.Join(
+                concept_table,
+                on=(col, CONCEPT_ID),
+                join_table_cols=[CONCEPT_NAME],
+                isouter=True,
+            )(src_table)
+            dst_col_name = dst_cols[i] if dst_cols else col.replace(ID, NAME)
+            src_table = qo.Rename({CONCEPT_NAME: dst_col_name})(src_table)
+
+        return QueryInterface(self.db, src_table)
+
+    def _map_care_site_id(
+        self,
+        source_table: Union[Subquery, QueryInterface],
+    ) -> QueryInterface:
+        """Map care_site_id in a source table to care_site table.
+
+        Parameters
+        ----------
+        source_table
+            Source table with care_site_id.
+
+        Returns
+        -------
+        cycquery.interface.QueryInterface
+            Query with mapped columns from care_site table.
+
+        """
+        if isinstance(source_table, QueryInterface):
+            source_table = source_table.query
+        care_site_table = self.get_table(self.schema_name, "care_site")
+        table = qo.Join(
+            care_site_table,
+            on=CARE_SITE_ID,
+            join_table_cols=[CARE_SITE_NAME, CARE_SITE_SOURCE_VALUE],
+            isouter=True,
+        )(source_table)
+
+        return QueryInterface(self.db, table)
+
+    def visit_occurrence(
+        self,
+    ) -> QueryInterface:
+        """Query OMOP visit_occurrence table.
+
+        Returns
+        -------
+        cycquery.interface.QueryInterface
+            Constructed query, wrapped in an interface object.
+
+        """
+        table = self.get_table(self.schema_name, "visit_occurrence")
+        table = self.map_concept_ids_to_name(
+            table,
+            [
+                "visit_concept_id",
+                "visit_type_concept_id",
+            ],
+        )
+        table = self._map_care_site_id(table)
+
+        return QueryInterface(self.db, table)
+
+    def visit_detail(
+        self,
+    ) -> QueryInterface:
+        """Query OMOP visit_detail table.
+
+        Returns
+        -------
+        cycquery.interface.QueryInterface
+            Constructed query, wrapped in an interface object.
+
+        """
+        table = self.get_table(self.schema_name, "visit_detail")
+        table = self.map_concept_ids_to_name(
+            table,
+            ["visit_detail_concept_id", "visit_detail_type_concept_id"],
+        )
+
+        return QueryInterface(self.db, table)
+
+    def person(
+        self,
+    ) -> QueryInterface:
+        """Query OMOP person table.
+
+        Returns
+        -------
+        cycquery.interface.QueryInterface
+            Constructed query, wrapped in an interface object.
+
+        """
+        table = self.get_table(self.schema_name, "person")
+        table = self.map_concept_ids_to_name(
+            table,
+            ["gender_concept_id", "race_concept_id", "ethnicity_concept_id"],
+        )
+
+        return QueryInterface(self.db, table)
+
+    def observation(
+        self,
+    ) -> QueryInterface:
+        """Query OMOP observation table.
+
+        Returns
+        -------
+        cycquery.interface.QueryInterface
+            Constructed query, wrapped in an interface object.
+
+        """
+        table = self.get_table(self.schema_name, "observation")
+        table = self.map_concept_ids_to_name(
+            table,
+            [OBSERVATION_CONCEPT_ID, OBSERVATION_TYPE_CONCEPT_ID],
+        )
+
+        return QueryInterface(self.db, table)
+
+    def measurement(
+        self,
+    ) -> QueryInterface:
+        """Query OMOP measurement table.
+
+        Returns
+        -------
+        cycquery.interface.QueryInterface
+            Constructed query, wrapped in an interface object.
+
+        """
+        table = self.get_table(self.schema_name, "measurement")
+        # Cast value_as_concept_id to int.
+        table = qo.Cast([VALUE_AS_CONCEPT_ID], "int")(table)
+        table = self.map_concept_ids_to_name(
+            table,
+            [MEASUREMENT_CONCEPT_ID, MEASUREMENT_TYPE_CONCEPT_ID, UNIT_CONCEPT_ID],
+        )
+
+        return QueryInterface(self.db, table)
diff --git a/cycquery/ops.py b/cycquery/ops.py
new file mode 100644
index 0000000..94b14a9
--- /dev/null
+++ b/cycquery/ops.py
@@ -0,0 +1,3266 @@
+"""Query operations."""
+
+from __future__ import annotations
+
+import logging
+import operator
+import typing
+from abc import abstractmethod
+from collections import OrderedDict
+from datetime import datetime, timedelta
+from itertools import islice
+
+import sqlalchemy
+from sqlalchemy import and_, cast, extract, func, literal_column, or_, select
+from sqlalchemy.sql.elements import BinaryExpression
+from sqlalchemy.sql.expression import literal
+from sqlalchemy.sql.selectable import Select, Subquery
+from sqlalchemy.types import Boolean
+
+from cycquery.util import (
+    TableTypes,
+    apply_to_columns,
+    check_timestamp_columns,
+    drop_columns,
+    ends_with,
+    equals,
+    filter_columns,
+    get_column,
+    get_column_names,
+    get_columns,
+    get_delta_column,
+    greater_than,
+    has_columns,
+    has_substring,
+    in_,
+    less_than,
+    not_equals,
+    process_column,
+    rename_columns,
+    reorder_columns,
+    starts_with,
+    table_params_to_type,
+    trim_columns,
+)
+from cycquery.utils.common import to_datetime_format, to_list, to_list_optional
+from cycquery.utils.log import setup_logging
+
+
+# Logging.
+LOGGER = logging.getLogger(__name__)
+setup_logging(print_level="INFO", logger=LOGGER)
+
+
+# ruff: noqa: W505
+
+
+def _addindent(s_: str, num_spaces: int = 4) -> str:
+    """Add spaces to a string except the first line.
+
+    Parameters
+    ----------
+    s_
+        String to add spaces to.
+    num_spaces
+        Number of spaces to add.
+
+    Returns
+    -------
+    str
+        String with spaces added.
+
+    """
+    s = s_.split("\n")
+    if len(s) == 1:
+        return s_
+    first = s.pop(0)
+    s = [(num_spaces * " ") + line for line in s]
+    s = "\n".join(s)  # type: ignore
+
+    return first + "\n" + s  # type: ignore
+
+
+class QueryOp:
+    """Base class for query operations."""
+
+    _ops: typing.Dict[str, "QueryOp"]
+
+    def __init__(self, *args: typing.Any, **kwargs: typing.Any) -> None:
+        super().__setattr__("_ops", OrderedDict())
+
+    @abstractmethod
+    def __call__(self, *args: typing.Any, **kwargs: typing.Any) -> Subquery:
+        """Implement a calling function."""
+        pass
+
+    def _add_op(self, name: str, op_: "QueryOp") -> None:
+        """Add a child operation to the current query operation.
+
+        The query op can be accessed as an attribute using the given name.
+
+        Parameters
+        ----------
+        name
+            Name of the child op. The child op can be accessed from this op
+            using the given name.
+        op_
+            Child op to be added to the parent query op.
+
+        """
+        if not isinstance(op_, QueryOp) and op_ is not None:
+            raise TypeError("{} is not a QueryOp subclass".format(str(op_)))
+        if not isinstance(name, str):
+            raise TypeError("Query op name should be a string")
+        if hasattr(self, name) and name not in self._ops:
+            raise KeyError("Attribute '{}' already exists".format(name))
+        if "." in name:
+            raise KeyError('Query op name can\'t contain ".", got: {}'.format(name))
+        if name == "":
+            raise KeyError('Query op name can\'t be empty string ""')
+        self._ops[name] = op_
+
+    def _get_ops(self) -> typing.Iterator["QueryOp"]:
+        """Return an iterator over the child operations.
+
+        Returns
+        -------
+        typing.Iterator[QueryOp]
+            Iterator over the child operations.
+
+        """
+        for _, op_ in self._ops.items():
+            yield op_
+
+    def _get_name(self) -> str:
+        """Get the name of the query op.
+
+        Returns
+        -------
+        str
+            Name of the query op.
+
+        """
+        return self.__class__.__name__
+
+    def __setattr__(self, name: str, value: "QueryOp") -> None:
+        """Set an attribute.
+
+        Parameters
+        ----------
+        name
+            Name of the attribute.
+        value
+            Value of the attribute.
+
+        """
+        ops = self.__dict__.get("_ops")
+        if isinstance(value, QueryOp):
+            if ops is None:
+                raise AttributeError("Can't assign op before QueryOp.__init__() call")
+            ops[name] = value
+        elif ops is not None and name in ops:
+            if value is not None:
+                raise TypeError(
+                    "Cannot assign '{}' as child op '{}' (QueryOp or None expected)".format(
+                        type(value).__name__,
+                        name,
+                    ),
+                )
+            ops[name] = value
+        else:
+            super().__setattr__(name, value)
+
+    def _extra_repr(self) -> str:
+        """Set the extra representation of the query op.
+
+        To print customized extra information, you should re-implement
+        this method in your own query ops.
Both single-line and multi-line + strings are acceptable. + + Returns + ------- + str + Extra representation of the query op. + + """ + return "" + + def __repr__(self) -> str: + """Return the string representation of the query op. + + Returns + ------- + str + String representation of the query op. + + """ + extra_lines = [] + extra_repr = self._extra_repr() + if extra_repr: + extra_lines = extra_repr.split("\n") + child_lines = [] + for key, op_ in self._ops.items(): + mod_str = repr(op_) + mod_str = _addindent(mod_str, 2) + child_lines.append("(" + key + "): " + mod_str) + lines = extra_lines + child_lines + main_str = self._get_name() + "(" + if lines: + if len(extra_lines) == 1 and not child_lines: + main_str += extra_lines[0] + else: + main_str += "\n " + "\n ".join(lines) + "\n" + main_str += ")" + + return main_str + + def __getattr__(self, name: str) -> "QueryOp": + """Get an attribute. + + Parameters + ---------- + name + Name of the attribute. + + Returns + ------- + QueryOp + The child operation with the given name. + + """ + if name in self._ops: + return self._ops[name] + raise AttributeError( + f"'{self.__class__.__name__}' object has no attribute '{name}'", + ) + + +def _chain_ops( + query: Subquery, + ops: typing.Iterator[QueryOp], +) -> Subquery: + """Chain query ops. + + Parameters + ---------- + query + Query to chain the ops to. + ops + Query ops to chain. + + Returns + ------- + Subquery + Query with the ops chained. + + """ + for op_ in ops: + if isinstance(op_, Sequential): + query = _chain_ops(query, op_._get_ops()) + elif isinstance(op_, QueryOp): + query = op_(query) + + return query + + +class Sequential(QueryOp): + """Sequential query operations class. + + Chains a sequence of query operations and executes the final query on a table. + + Examples + -------- + >>> Sequential(Drop(["col1", "col2"]), ...) + >>> Sequential([Drop(["col1", "col2"]), ...]) + + """ + + @typing.overload + def __init__(self, *ops: QueryOp) -> None: + ... + + @typing.overload + def __init__(self, ops: typing.List[QueryOp]) -> None: + ... + + @typing.overload + def __init__(self, op: OrderedDict[str, QueryOp]) -> None: + ... + + def __init__(self, *args: QueryOp) -> None: # type: ignore + """Initialize the class. + + Parameters + ---------- + args + Query operations to be chained sequentially. + + """ + super().__init__() + if len(args) == 1 and isinstance(args[0], OrderedDict): + for key, op_ in args[0].items(): + self._add_op(key, op_) + elif len(args) == 1 and isinstance(args[0], list): + for idx, op_ in enumerate(args[0]): + self._add_op(str(idx), op_) + else: + for idx, op_ in enumerate(args): + self._add_op(str(idx), op_) + + def __len__(self) -> int: + """Return the number of query ops in the Sequential. + + Returns + ------- + int + Number of query ops in the Sequential. + + """ + return len(self._ops) + + def __iter__(self) -> typing.Iterator[QueryOp]: + """Return an iterator over the query ops. + + Returns + ------- + typing.Iterator[QueryOp] + Iterator over the query ops. + + """ + return iter(self._ops.values()) + + def __add__(self, other: "Sequential") -> "Sequential": + """Add two Sequential objects. + + Parameters + ---------- + other + Sequential object to be added. + + Returns + ------- + Sequential + Sequential object with the two Sequential objects chained. 
+ + """ + if isinstance(other, Sequential): + ret = Sequential() + for op_ in self: + ret.append(op_) + for op_ in other: + ret.append(op_) + return ret + raise ValueError( + "Add operator supports only objects " + "of Sequential class, but {} is given.".format(str(type(other))), + ) + + def __iadd__(self, other: "Sequential") -> "Sequential": + """Add two Sequential objects inplace. + + Parameters + ---------- + other + Sequential object to be added. + + Returns + ------- + Sequential + Sequential object with the two Sequential objects chained. + + """ + if isinstance(other, Sequential): + offset = len(self) + for i, op_ in enumerate(other): + self._add_op(str(i + offset), op_) + return self + raise ValueError( + "Add operator supports only objects " + "of Sequential class, but {} is given.".format(str(type(other))), + ) + + def _get_item_by_idx( + self, + iterator: typing.Iterator[typing.Any], + idx: int, + ) -> typing.Any: + """Get the idx-th item of the iterator. + + Parameters + ---------- + iterator + Iterator to get the item from. + idx + Index of the item to get. + + Returns + ------- + QueryOp + The idx-th item of the iterator. + + """ + size = len(self) + idx = operator.index(idx) + if not -size <= idx < size: + raise IndexError("index {} is out of range".format(idx)) + idx %= size + + return next(islice(iterator, idx, None)) + + def __getitem__( + self, + idx: typing.Union[slice, int], + ) -> typing.Any: + """Get the idx-th item of the sequential query op. + + Parameters + ---------- + idx + Index of the item to get. + + Returns + ------- + Sequential or QueryOp + The idx-th item of the sequential query op. + + """ + if isinstance(idx, slice): + return self.__class__(OrderedDict(list(self._ops.items())[idx])) + + return self._get_item_by_idx(self._ops.values(), idx) # type: ignore + + def __setitem__(self, idx: int, op_: QueryOp) -> None: + """Set the idx-th item of the sequential query op. + + Parameters + ---------- + idx + Index of the item to set. + op_ + Query op to set. + + """ + key: str = self._get_item_by_idx(self._ops.keys(), idx) # type: ignore + return setattr(self, key, op_) + + def __delitem__(self, idx: typing.Union[slice, int]) -> None: + """Delete the idx-th item of the sequential query op. + + Parameters + ---------- + idx + Index of the item to delete. + + """ + if isinstance(idx, slice): + for key in list(self._ops.keys())[idx]: + delattr(self, key) + else: + key = self._get_item_by_idx(self._ops.keys(), idx) # type: ignore + delattr(self, key) + str_indices = [str(i) for i in range(len(self._ops))] + self._ops = OrderedDict(list(zip(str_indices, self._ops.values()))) + + def append(self, op_: QueryOp) -> "Sequential": + """Append a given query op to the end. + + Parameters + ---------- + op_ + Query op to append. + + Returns + ------- + Sequential + Sequential object with the query op appended. + + """ + self._add_op(str(len(self)), op_) + return self + + def pop(self, key: typing.Union[int, slice]) -> QueryOp: + """Pop the query op at the given index. + + Parameters + ---------- + key + Index of the query op to pop. + + Returns + ------- + QueryOp + Popped query op. + + """ + v = self[key] + del self[key] + + return v # type: ignore + + def insert(self, index: int, op_: QueryOp) -> "Sequential": + """Insert a given query op at the given index. + + Parameters + ---------- + index + Index to insert the query op at. + op_ + Query op to insert. + + Returns + ------- + Sequential + Sequential object with the query op inserted. 
+
+        """
+        if not isinstance(op_, QueryOp):
+            raise TypeError("Query op should be of type: {}".format(QueryOp))
+        n = len(self._ops)
+        if not (-n <= index <= n):
+            raise IndexError("Index out of range: {}".format(index))
+        if index < 0:
+            index += n
+        for i in range(n, index, -1):
+            self._ops[str(i)] = self._ops[str(i - 1)]
+        self._ops[str(index)] = op_
+
+        return self
+
+    def extend(self, sequential: "Sequential") -> "Sequential":
+        """Extend the sequential query op with another sequential query op.
+
+        Parameters
+        ----------
+        sequential
+            Sequential object to extend with.
+
+        Returns
+        -------
+        Sequential
+            Sequential object with the other sequential query op extended.
+
+        """
+        for op_ in sequential:
+            self.append(op_)
+
+        return self
+
+    @table_params_to_type(Subquery)
+    def __call__(self, table: TableTypes) -> Subquery:
+        """Execute the query operations on the table.
+
+        Parameters
+        ----------
+        table
+            Table to be queried.
+
+        Returns
+        -------
+        Subquery
+            Query result after chaining the query operations.
+
+        """
+        return _chain_ops(table, self._get_ops())
+
+
+def _append_if_missing(
+    table: TableTypes,
+    keep_cols: typing.Optional[typing.Union[str, typing.List[str]]] = None,
+    force_include_cols: typing.Optional[typing.Union[str, typing.List[str]]] = None,
+) -> Subquery:
+    """Keep only certain columns in a table, but must include certain columns.
+
+    Parameters
+    ----------
+    table
+        Table on which to perform the operation.
+    keep_cols
+        Columns to keep.
+    force_include_cols
+        Columns to include (forcefully).
+
+    """
+    if keep_cols is None:
+        return table
+    keep_cols = to_list(keep_cols)
+    force_include_cols = to_list(force_include_cols)
+    extend_cols = [col for col in force_include_cols if col not in keep_cols]
+    keep_cols = extend_cols + keep_cols
+
+    return Keep(keep_cols)(table)
+
+
+def _none_add(obj1: typing.Any, obj2: typing.Any) -> typing.Any:
+    """Add two objects together while ignoring None values.
+
+    If both objects are None, returns None.
+
+    Parameters
+    ----------
+    obj1
+        First object to add.
+    obj2
+        Second object to add.
+
+    Returns
+    -------
+    typing.Any
+        Result of adding the two objects.
+
+    """
+    if obj1 is None:
+        return obj2
+    if obj2 is None:
+        return obj1
+    return obj1 + obj2
+
+
+def _process_checks(
+    table: TableTypes,
+    cols: typing.Optional[typing.Union[str, typing.List[str]]] = None,
+    cols_not_in: typing.Optional[typing.Union[str, typing.List[str]]] = None,
+    timestamp_cols: typing.Optional[typing.Union[str, typing.List[str]]] = None,
+) -> Subquery:
+    """Perform checks, and possibly alterations, on a table.
+
+    Parameters
+    ----------
+    table
+        Table on which to perform the operation.
+    cols
+        Columns that must be present in the table.
+    cols_not_in
+        Columns that must not be present in the table.
+    timestamp_cols
+        Timestamp columns to check.
+
+    Returns
+    -------
+    sqlalchemy.sql.selectable.Subquery
+        Checked and possibly altered table.
+
+    """
+    if cols is not None:
+        cols = to_list(cols)
+        has_columns(table, cols, raise_error=True)
+    if cols_not_in is not None:
+        cols_not_in = to_list(cols_not_in)
+        if has_columns(table, cols_not_in, raise_error=False):
+            raise ValueError(f"Cannot specify columns {cols_not_in}.")
+    if timestamp_cols is not None:
+        timestamp_cols = to_list(timestamp_cols)
+        has_columns(table, timestamp_cols, raise_error=True)
+        check_timestamp_columns(table, timestamp_cols, raise_error=True)
+
+    return table
+
+
+class FillNull(QueryOp):
+    """Fill NULL values with a given value.
+
+    Parameters
+    ----------
+    cols
+        Columns to fill.
+    fill_values
+        Value(s) to fill with.
+    new_col_names
+        New column name(s) for the filled columns. If not provided, the
+        filled values replace the original columns.
+
+    Examples
+    --------
+    >>> FillNull("col1", 0)(table)
+    >>> FillNull(["col1", "col2"], [0, 1])(table)
+    >>> FillNull(["col1", "col2"], [0, 1], ["col1_new", "col2_new"])(table)
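+
+    A single fill value is broadcast across all columns, so the following
+    (illustrative) call fills NULLs in both columns with zero:
+
+    >>> FillNull(["col1", "col2"], 0)(table)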
+
+    """
+
+    def __init__(
+        self,
+        cols: typing.Union[str, typing.List[str]],
+        fill_values: typing.Union[typing.Any, typing.List[typing.Any]],
+        new_col_names: typing.Optional[typing.Union[str, typing.List[str]]] = None,
+    ) -> None:
+        super().__init__()
+        self.cols = cols
+        self.fill_values = fill_values
+        self.new_col_names = new_col_names
+
+    def __call__(self, table: TableTypes) -> Subquery:
+        """Fill NULL values with a given value.
+
+        Parameters
+        ----------
+        table
+            Table on which to perform the operation.
+
+        Returns
+        -------
+        Subquery
+            Table with NULL values filled.
+
+        """
+        cols = to_list(self.cols)
+        fill_values = to_list(self.fill_values)
+        new_col_names = to_list_optional(self.new_col_names)
+        if new_col_names and len(cols) != len(new_col_names):
+            raise ValueError(
+                "Number of columns to fill and number of new column names must match.",
+            )
+        table = _process_checks(table, cols=self.cols)
+        if len(fill_values) == 1:
+            fill_values = fill_values * len(cols)
+        for col, fill in zip(cols, fill_values):
+            coalesced_col = func.coalesce(table.c[col], fill).label(
+                f"coalesced_col_{col}",
+            )
+            table = select([table, coalesced_col]).subquery()
+        if new_col_names:
+            for col, new_col in zip(cols, new_col_names):
+                table = Rename({f"coalesced_col_{col}": new_col})(table)
+        else:
+            for col in cols:
+                table = drop_columns(table, col)
+                table = Rename({f"coalesced_col_{col}": col})(table)
+
+        return table
+
+
+class Drop(QueryOp):
+    """Drop some columns.
+
+    Parameters
+    ----------
+    cols
+        Columns to drop.
+
+    Examples
+    --------
+    >>> Drop("col1")(table)
+    >>> Drop(["col1", "col2"])(table)
+
+    """
+
+    def __init__(self, cols: typing.Union[str, typing.List[str]]) -> None:
+        super().__init__()
+        self.cols = cols
+
+    def __call__(self, table: TableTypes) -> Subquery:
+        """Process the table.
+
+        Parameters
+        ----------
+        table
+            Table on which to perform the operation.
+
+        Returns
+        -------
+        sqlalchemy.sql.selectable.Subquery
+            Processed table.
+
+        """
+        table = _process_checks(table, cols=self.cols)
+
+        return drop_columns(table, self.cols)
+
+
+class Rename(QueryOp):
+    """Rename some columns.
+
+    Parameters
+    ----------
+    rename_map
+        Map from an existing column name to another name.
+    check_exists
+        Whether to check if all of the keys in the map exist as columns.
+
+    Examples
+    --------
+    >>> Rename({"col1": "col1_new"})(table)
+
+    """
+
+    def __init__(self, rename_map: typing.Dict[str, str], check_exists: bool = True):
+        super().__init__()
+        self.rename_map = rename_map
+        self.check_exists = check_exists
+
+    def __call__(self, table: TableTypes) -> Subquery:
+        """Process the table.
+
+        Parameters
+        ----------
+        table
+            Table on which to perform the operation.
+
+        Returns
+        -------
+        sqlalchemy.sql.selectable.Subquery
+            Processed table.
+
+        """
+        if self.check_exists:
+            table = _process_checks(table, cols=list(self.rename_map.keys()))
+
+        return rename_columns(table, self.rename_map)
+
+
+class Substring(QueryOp):
+    """Get a substring of a string column.
+
+    Parameters
+    ----------
+    col
+        Name of the column containing the string from which the substring
+        is extracted.
+    start_index
+        Start index of substring.
+    stop_index
+        Stop index of substring.
+    new_col_label
+        Name of the new column with the extracted substring.
+ + Examples + -------- + >>> Substring("col1", 0, 2, "col1_substring")(table) + + """ + + def __init__( + self, + col: str, + start_index: int, + stop_index: int, + new_col_label: typing.Optional[str] = None, + ): + super().__init__() + self.col = col + self.start_index = start_index + self.stop_index = stop_index + self.new_col_label = new_col_label + + def __call__(self, table: TableTypes) -> Subquery: + """Process the table. + + Parameters + ---------- + table + Table on which to perform the operation. + + Returns + ------- + sqlalchemy.sql.selectable.Subquery + Processed table. + + """ + table = _process_checks(table, cols=self.col, cols_not_in=self.new_col_label) + + return apply_to_columns( + table, + self.col, + lambda x: func.substr( + process_column(x, to_str=True), + self.start_index, + self.stop_index, + ), + new_col_labels=self.new_col_label, + ) + + +class Reorder(QueryOp): + """Reorder the columns in a table. + + Parameters + ---------- + cols + Complete list of table column names in the new order. + + Examples + -------- + >>> Reorder(["col2", "col1"])(table) + + """ + + def __init__(self, cols: typing.List[str]): + super().__init__() + self.cols = cols + + def __call__(self, table: TableTypes) -> Subquery: + """Process the table. + + Parameters + ---------- + table + Table on which to perform the operation. + + Returns + ------- + sqlalchemy.sql.selectable.Subquery + Processed table. + + """ + table = _process_checks(table, cols=self.cols) + + return reorder_columns(table, self.cols) + + +class ReorderAfter(QueryOp): + """Reorder a number of columns to come after a specified column. + + Parameters + ---------- + cols + Ordered list of column names which will come after a specified column. + after + Column name for the column after which the other columns will follow. + + Examples + -------- + >>> ReorderAfter(["col2", "col1"], "col3")(table) + + """ + + def __init__(self, cols: typing.Union[str, typing.List[str]], after: str): + super().__init__() + self.cols = cols + self.after = after + + def __call__(self, table: TableTypes) -> Subquery: + """Process the table. + + Parameters + ---------- + table + Table on which to perform the operation. + + Returns + ------- + sqlalchemy.sql.selectable.Subquery + Processed table. + + """ + self.cols = to_list(self.cols) + table = _process_checks(table, cols=self.cols + [self.after]) + names = get_column_names(table) + names = [name for name in names if name not in self.cols] + name_after_ind = names.index(self.after) + 1 + new_order = names[:name_after_ind] + self.cols + names[name_after_ind:] + + return Reorder(new_order)(table) + + +class Keep(QueryOp): + """Keep only the specified columns in a table. + + Parameters + ---------- + cols + The columns to keep. + + Examples + -------- + >>> Keep("col1")(table) + >>> Keep(["col1", "col2"])(table) + + """ + + def __init__(self, cols: typing.Union[str, typing.List[str]]): + super().__init__() + self.cols = cols + + def __call__(self, table: TableTypes) -> Subquery: + """Process the table. + + Parameters + ---------- + table + Table on which to perform the operation. + + Returns + ------- + sqlalchemy.sql.selectable.Subquery + Processed table. + + """ + table = _process_checks(table, cols=self.cols) + + return filter_columns(table, self.cols) + + +class Trim(QueryOp): + """Trim the whitespace from some string columns. + + Parameters + ---------- + cols + Columns to trim. + new_col_labels + If specified, create new columns with these labels. 
Otherwise,
+        apply the function to the existing columns.
+
+    Examples
+    --------
+    >>> Trim("col1")(table)
+    >>> Trim(["col1", "col2"])(table)
+    >>> Trim("col1", "col1_trimmed")(table)
+    >>> Trim(["col1", "col2"], ["col1_trimmed", "col2_trimmed"])(table)
+
+    """
+
+    def __init__(
+        self,
+        cols: typing.Union[str, typing.List[str]],
+        new_col_labels: typing.Optional[typing.Union[str, typing.List[str]]] = None,
+    ):
+        super().__init__()
+        self.cols = cols
+        self.new_col_labels = new_col_labels
+
+    def __call__(self, table: TableTypes) -> Subquery:
+        """Process the table.
+
+        Parameters
+        ----------
+        table
+            Table on which to perform the operation.
+
+        Returns
+        -------
+        sqlalchemy.sql.selectable.Subquery
+            Processed table.
+
+        """
+        table = _process_checks(table, cols=self.cols)
+
+        return trim_columns(table, self.cols, new_col_labels=self.new_col_labels)
+
+
+class Literal(QueryOp):
+    """Add a literal column to a table.
+
+    Parameters
+    ----------
+    value
+        Value of the literal, e.g., a string or integer.
+    col
+        Label of the new literal column.
+
+    Examples
+    --------
+    >>> Literal(1, "col1")(table)
+
+    """
+
+    def __init__(self, value: typing.Any, col: str):
+        super().__init__()
+        self.value = value
+        self.col = col
+
+    def __call__(self, table: TableTypes) -> Subquery:
+        """Process the table.
+
+        Parameters
+        ----------
+        table
+            Table on which to perform the operation.
+
+        Returns
+        -------
+        sqlalchemy.sql.selectable.Subquery
+            Processed table.
+
+        """
+        table = _process_checks(table, cols_not_in=self.col)
+
+        return select(table, literal(self.value).label(self.col)).subquery()
+
+
+class ExtractTimestampComponent(QueryOp):
+    """Extract a component such as year or month from a timestamp column.
+
+    Parameters
+    ----------
+    timestamp_col
+        Timestamp column from which to extract the time component.
+    extract_str
+        Information to extract, e.g., "year", "month".
+    label
+        Column label for the extracted column.
+
+    Examples
+    --------
+    >>> ExtractTimestampComponent("col1", "year", "year")(table)
+    >>> ExtractTimestampComponent("col1", "month", "month")(table)
+
+    """
+
+    def __init__(self, timestamp_col: str, extract_str: str, label: str):
+        super().__init__()
+        self.timestamp_col = timestamp_col
+        self.extract_str = extract_str
+        self.label = label
+
+    def __call__(self, table: TableTypes) -> Subquery:
+        """Process the table.
+
+        Parameters
+        ----------
+        table
+            Table on which to perform the operation.
+
+        Returns
+        -------
+        sqlalchemy.sql.selectable.Subquery
+            Processed table.
+
+        """
+        table = _process_checks(
+            table,
+            timestamp_cols=self.timestamp_col,
+            cols_not_in=self.label,
+        )
+        table = select(
+            table,
+            extract(self.extract_str, get_column(table, self.timestamp_col)).label(
+                self.label,
+            ),
+        ).subquery()
+
+        return Cast(self.label, "int")(table)
+
+
+class AddNumeric(QueryOp):
+    """Add a numeric value to some columns.
+
+    Parameters
+    ----------
+    add_to
+        Column name(s) of the columns to which the value is added.
+    add
+        Value(s) to add to the add_to columns.
+    new_col_labels
+        If specified, create new columns with these labels. Otherwise,
+        apply the function to the existing columns.
+
+    Examples
+    --------
+    >>> AddNumeric("col1", 1)(table)
+    >>> AddNumeric(["col1", "col2"], 1)(table)
+    >>> AddNumeric("col1", 1, "col1_plus_1")(table)
+    >>> AddNumeric(["col1", "col2"], 1, ["col1_plus_1", "col2_plus_1"])(table)
+    >>> AddNumeric(["col1", "col2"], [1, 2.2])(table)
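+
+    When `add` is a list, it must match `add_to` in length; each value is then
+    added to its corresponding column (column names here are illustrative):
+
+    >>> AddNumeric(["col1", "col2"], [10, 20], ["col1_new", "col2_new"])(table)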
+
+    """
+
+    def __init__(
+        self,
+        add_to: typing.Union[str, typing.List[str]],
+        add: typing.Union[int, float, typing.List[int], typing.List[float]],
+        new_col_labels: typing.Optional[typing.Union[str, typing.List[str]]] = None,
+    ):
+        super().__init__()
+        self.add_to = add_to
+        self.add = add
+        self.new_col_labels = new_col_labels
+
+    def _gen_lambda(
+        self,
+        add: typing.Union[int, float],
+    ) -> typing.Callable[[sqlalchemy.sql.schema.Column], sqlalchemy.sql.schema.Column]:
+        """Generate the lambda function."""
+        return lambda x: x + add
+
+    def __call__(self, table: TableTypes) -> Subquery:
+        """Process the table.
+
+        Parameters
+        ----------
+        table
+            Table on which to perform the operation.
+
+        Returns
+        -------
+        sqlalchemy.sql.selectable.Subquery
+            Processed table.
+
+        """
+        table = _process_checks(
+            table,
+            cols=self.add_to,
+            cols_not_in=self.new_col_labels,
+        )
+        self.add_to = to_list(self.add_to)
+        if isinstance(self.add, (int, float)):
+            add = [self.add] * len(self.add_to)
+        elif len(self.add_to) != len(self.add):
+            raise ValueError(
+                "Length of add_to and add must be the same if add is a list.",
+            )
+        else:
+            add = self.add
+
+        return apply_to_columns(
+            table,
+            self.add_to,
+            [self._gen_lambda(add_num) for add_num in add],
+            new_col_labels=self.new_col_labels,
+        )
+
+
+class AddDeltaConstant(QueryOp):
+    """Construct and add a datetime.timedelta object to some columns.
+
+    Parameters
+    ----------
+    add_to
+        Column name(s) of the timestamp columns to which the timedelta is added.
+    delta
+        A timedelta object.
+    new_col_labels
+        If specified, create new columns with these labels. Otherwise,
+        apply the function to the existing columns.
+
+    Examples
+    --------
+    >>> AddDeltaConstant("col1", datetime.timedelta(days=1))(table)
+    >>> AddDeltaConstant(["col1", "col2"], datetime.timedelta(days=1))(table)
+    >>> AddDeltaConstant("col1", datetime.timedelta(days=1), "col1_plus_1")(table)
+
+    """
+
+    def __init__(
+        self,
+        add_to: typing.Union[str, typing.List[str]],
+        delta: timedelta,
+        new_col_labels: typing.Optional[typing.Union[str, typing.List[str]]] = None,
+    ):
+        super().__init__()
+        self.add_to = add_to
+        self.delta = delta
+        self.new_col_labels = new_col_labels
+
+    def __call__(self, table: TableTypes) -> Subquery:
+        """Process the table.
+
+        Parameters
+        ----------
+        table
+            Table on which to perform the operation.
+
+        Returns
+        -------
+        sqlalchemy.sql.selectable.Subquery
+            Processed table.
+
+        """
+        table = _process_checks(
+            table,
+            timestamp_cols=self.add_to,
+            cols_not_in=self.new_col_labels,
+        )
+
+        return apply_to_columns(
+            table,
+            self.add_to,
+            lambda x: x + self.delta,
+            new_col_labels=self.new_col_labels,
+        )
+
+
+class AddColumn(QueryOp):
+    """Add a column to some columns.
+
+    Parameters
+    ----------
+    add_to
+        Column name(s) of the columns to which the column is added.
+    col
+        Column name of the column to add to the add_to columns.
+    negative
+        Subtract the column rather than adding.
+    new_col_labels
+        If specified, create new columns with these labels. Otherwise,
+        apply the function to the existing columns.
+
+    Examples
+    --------
+    >>> AddColumn("col1", "col2")(table)
+    >>> AddColumn(["col1", "col2"], "col3")(table)
+    >>> AddColumn("col1", "col2", negative=True)(table)
+    >>> AddColumn("col1", "col2", new_col_labels="col1_plus_col2")(table)
+    >>> AddColumn(["col1", "col2"], "col3", new_col_labels=["col1_plus_col3", "col2_plus_col3"])(table)
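+
+    Subtraction with a named output column works the same way (names here are
+    illustrative):
+
+    >>> AddColumn("col1", "col2", negative=True, new_col_labels="col1_minus_col2")(table)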
+
+    Warning
+    -------
+    Pay attention to column types. Some combinations will work,
+    whereas others will not.
+
+    """
+
+    def __init__(
+        self,
+        add_to: typing.Union[str, typing.List[str]],
+        col: str,
+        negative: typing.Optional[bool] = False,
+        new_col_labels: typing.Optional[typing.Union[str, typing.List[str]]] = None,
+    ):
+        super().__init__()
+        self.add_to = add_to
+        self.col = col
+        self.negative = negative
+        self.new_col_labels = new_col_labels
+
+    def __call__(self, table: TableTypes) -> Subquery:
+        """Process the table.
+
+        Parameters
+        ----------
+        table
+            Table on which to perform the operation.
+
+        Returns
+        -------
+        sqlalchemy.sql.selectable.Subquery
+            Processed table.
+
+        """
+        # If the column being added is a timestamp column, ensure the others are too
+        if check_timestamp_columns(table, self.col):
+            table = _process_checks(
+                table,
+                timestamp_cols=self.add_to,
+                cols_not_in=self.new_col_labels,
+            )
+        else:
+            table = _process_checks(
+                table,
+                cols=self.add_to,
+                cols_not_in=self.new_col_labels,
+            )
+        col = get_column(table, self.col)
+        if self.negative:
+            return apply_to_columns(
+                table,
+                self.add_to,
+                lambda x: x - col,
+                new_col_labels=self.new_col_labels,
+            )
+
+        return apply_to_columns(
+            table,
+            self.add_to,
+            lambda x: x + col,
+            new_col_labels=self.new_col_labels,
+        )
+
+
+class AddDeltaColumn(QueryOp):
+    """Construct and add an interval column to some columns.
+
+    Parameters
+    ----------
+    add_to
+        Column name(s) of the timestamp columns to which the interval is added.
+    negative
+        Subtract the interval rather than adding.
+    new_col_labels
+        If specified, create new columns with these labels. Otherwise,
+        apply the function to the existing columns.
+    **delta_kwargs
+        The arguments used to create the Interval column.
+
+    Examples
+    --------
+    >>> AddDeltaColumn("col1", days="col2")(table)
+    >>> AddDeltaColumn(["col1", "col2"], days="col3")(table)
+    >>> AddDeltaColumn("col1", negative=True, days="col2")(table)
+    >>> AddDeltaColumn("col1", new_col_labels="col1_plus_delta", days="col2")(table)
+
+    """
+
+    def __init__(
+        self,
+        add_to: typing.Union[str, typing.List[str]],
+        negative: typing.Optional[bool] = False,
+        new_col_labels: typing.Optional[typing.Union[str, typing.List[str]]] = None,
+        **delta_kwargs: typing.Any,
+    ) -> None:
+        super().__init__()
+        self.add_to = add_to
+        self.negative = negative
+        self.new_col_labels = new_col_labels
+        self.delta_kwargs = delta_kwargs
+
+    def __call__(self, table: TableTypes) -> Subquery:
+        """Process the table.
+
+        Parameters
+        ----------
+        table
+            Table on which to perform the operation.
+
+        Returns
+        -------
+        sqlalchemy.sql.selectable.Subquery
+            Processed table.
+
+        """
+        table = _process_checks(
+            table,
+            timestamp_cols=self.add_to,
+            cols_not_in=self.new_col_labels,
+        )
+        delta = get_delta_column(table, **self.delta_kwargs)
+        if self.negative:
+            return apply_to_columns(
+                table,
+                self.add_to,
+                lambda x: x - delta,
+                new_col_labels=self.new_col_labels,
+            )
+
+        return apply_to_columns(
+            table,
+            self.add_to,
+            lambda x: x + delta,
+            new_col_labels=self.new_col_labels,
+        )
+
+
+class Cast(QueryOp):
+    """Cast columns to a specified type.
+
+    Currently supports conversions to str, int, float, date, bool and timestamp.
+
+    Parameters
+    ----------
+    cols
+        Columns to cast.
+    type_
+        Name of the type to which to convert. Must be supported.
+
+    Examples
+    --------
+    >>> Cast("col1", "str")(table)
+    >>> Cast(["col1", "col2"], "int")(table)
+    >>> Cast("col1", "float")(table)
+    >>> Cast("col1", "date")(table)
+    >>> Cast("col1", "bool")(table)
+    >>> Cast("col1", "timestamp")(table)
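+
+    Casting is applied column-wise, so several timestamp columns can be
+    converted in one op (names here are illustrative):
+
+    >>> Cast(["start_ts", "end_ts"], "timestamp")(table)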
+
+    """
+
+    def __init__(self, cols: typing.Union[str, typing.List[str]], type_: str):
+        super().__init__()
+        self.cols = cols
+        self.type_ = type_
+
+    def __call__(self, table: TableTypes) -> Subquery:
+        """Process the table.
+
+        Parameters
+        ----------
+        table
+            Table on which to perform the operation.
+
+        Returns
+        -------
+        sqlalchemy.sql.selectable.Subquery
+            Processed table.
+
+        """
+        table = _process_checks(table, cols=self.cols)
+        cast_type_map = {
+            "str": "to_str",
+            "int": "to_int",
+            "float": "to_float",
+            "date": "to_date",
+            "bool": "to_bool",
+            "timestamp": "to_timestamp",
+        }
+        # Check that the given type is supported
+        if self.type_ not in cast_type_map:
+            supported_str = ", ".join(list(cast_type_map.keys()))
+            raise ValueError(
+                f"Conversion to type {self.type_} not supported. "
+                f"Supported types are: {supported_str}.",
+            )
+        # Cast
+        kwargs = {cast_type_map[self.type_]: True}
+
+        return apply_to_columns(
+            table,
+            self.cols,
+            lambda x: process_column(x, **kwargs),
+        )
+
+
+class Union(QueryOp):
+    """Union two tables.
+
+    Parameters
+    ----------
+    union_table
+        Table to union with the first table.
+    union_all
+        Whether to use the all keyword in the union.
+
+    Examples
+    --------
+    >>> Union(table2)(table1)
+    >>> Union(table2, union_all=True)(table1)
+
+    """
+
+    def __init__(
+        self,
+        union_table: TableTypes,
+        union_all: typing.Optional[bool] = False,
+    ):
+        super().__init__()
+        self.union_table = union_table
+        self.union_all = union_all
+
+    def __call__(self, table: TableTypes) -> Subquery:
+        """Process the table.
+
+        Parameters
+        ----------
+        table
+            Table on which to perform the operation.
+
+        Returns
+        -------
+        sqlalchemy.sql.selectable.Subquery
+            Processed table.
+
+        """
+        table = _process_checks(table)
+        union_table = _process_checks(self.union_table)
+        if self.union_all:
+            return select(table).union_all(select(union_table)).subquery()
+
+        return select(table).union(select(union_table)).subquery()
+
+
+class Join(QueryOp):
+    """Join a table with another table.
+
+    Parameters
+    ----------
+    join_table
+        Table on which to join.
+    on
+        A list of strings or tuples representing columns on which to join.
+        Strings represent columns of the same name in both tables. A tuple of
+        style (table_col, join_table_col) is used to join on columns of
+        different names. It is suggested to specify this parameter rather
+        than cond.
+    on_to_type
+        A list of types to which to convert the on columns before joining.
+        Useful when two columns have the same values but in different formats,
+        e.g., strings of ints.
+    cond
+        Condition on which to join the tables.
+    table_cols
+        Filters to keep only these columns from the table.
+    join_table_cols
+        Filters to keep only these columns from the join_table.
+    isouter
+        Whether to perform a left outer join.
+
+    Examples
+    --------
+    >>> Join(table2, on=["col1", ("col2", "col3")], on_to_type=[int, str])(table1)
+    >>> Join(table2, table_cols=["col1", "col2"])(table1)
+    >>> Join(table2, join_table_cols=["col1", "col2"])(table1)
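+
+    Joining on columns with different names, casting both sides before the
+    comparison (names here are illustrative):
+
+    >>> Join(table2, on=[("id", "other_id")], on_to_type=[int])(table1)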
+
+    Warnings
+    --------
+    If neither the on nor cond parameters are specified, an
+    expensive Cartesian product is performed.
+
+    """
+
+    @table_params_to_type(Subquery)
+    def __init__(
+        self,
+        join_table: TableTypes,
+        on: typing.Optional[
+            typing.Union[
+                str,
+                typing.List[str],
+                typing.Tuple[str],
+                typing.List[typing.Tuple[str, str]],
+            ]
+        ] = None,
+        on_to_type: typing.Optional[typing.Union[type, typing.List[type]]] = None,
+        cond: typing.Optional[BinaryExpression] = None,
+        table_cols: typing.Optional[typing.Union[str, typing.List[str]]] = None,
+        join_table_cols: typing.Optional[typing.Union[str, typing.List[str]]] = None,
+        isouter: typing.Optional[bool] = False,
+    ) -> None:
+        super().__init__()
+        if on is not None and cond is not None:
+            raise ValueError("Cannot specify both the 'on' and 'cond' arguments.")
+
+        self.join_table = join_table
+        self.cond = cond
+        self.on_ = to_list_optional(on)
+        self.on_to_type = to_list_optional(on_to_type)
+        self.table_cols = to_list_optional(table_cols)
+        self.join_table_cols = to_list_optional(join_table_cols)
+        self.isouter = isouter
+
+    @table_params_to_type(Subquery)
+    def __call__(self, table: TableTypes) -> Subquery:
+        """Process the table.
+
+        Parameters
+        ----------
+        table
+            Table on which to perform the operation.
+
+        Returns
+        -------
+        sqlalchemy.sql.selectable.Subquery
+            Processed table.
+
+        """
+        # Join on the equality of values in columns of same name in both tables
+        if self.on_ is not None:
+            # Process on columns
+            on_table_cols = [
+                col_obj if isinstance(col_obj, str) else col_obj[0]
+                for col_obj in self.on_
+            ]
+            on_join_table_cols = [
+                col_obj if isinstance(col_obj, str) else col_obj[1]
+                for col_obj in self.on_
+            ]
+            table = _process_checks(
+                table,
+                cols=_none_add(self.table_cols, on_table_cols),
+            )
+            self.join_table = _process_checks(
+                self.join_table,
+                cols=_none_add(self.join_table_cols, on_join_table_cols),
+            )
+            # Filter columns, keeping those being joined on
+            table = _append_if_missing(table, self.table_cols, on_table_cols)
+            self.join_table = _append_if_missing(
+                self.join_table,
+                self.join_table_cols,
+                on_join_table_cols,
+            )
+            # Perform type conversions if given
+            if self.on_to_type is not None:
+                for i, type_ in enumerate(self.on_to_type):
+                    table = Cast(on_table_cols[i], type_)(table)
+                    self.join_table = Cast(on_join_table_cols[i], type_)(
+                        self.join_table,
+                    )
+            cond = and_(
+                *[
+                    get_column(table, on_table_cols[i])
+                    == get_column(self.join_table, on_join_table_cols[i])
+                    for i in range(len(on_table_cols))
+                ],
+            )
+            table = select(table.join(self.join_table, cond, isouter=self.isouter))
+        else:
+            # Filter columns
+            if self.table_cols is not None:
+                table = Keep(self.table_cols)(table)
+            if self.join_table_cols is not None:
+                self.join_table = Keep(self.join_table_cols)(self.join_table)  # type: ignore
+
+            # Join on a specified condition
+            if self.cond is not None:
+                table = select(
+                    table.join(  # type: ignore
+                        self.join_table,
+                        self.cond,
+                        isouter=self.isouter,
+                    ),
+                )
+            # Join on no condition, i.e., a Cartesian product
+            else:
+                LOGGER.warning("A Cartesian product has been queried.")
+                table = select(table, self.join_table)
+
+        # Filter to include no duplicate columns
+        return select(
+            *[col for col in table.subquery().columns if "%(" not in col.name],
+        ).subquery()
+
+
+class ConditionEquals(QueryOp):
+    """Filter rows based on being equal, or not equal, to some value.
+
+    Parameters
+    ----------
+    col
+        Column name on which to condition.
+    value
+        Value to equal.
+    not_
+        Take negation of condition.
+    binarize_col
+        If specified, create a Boolean column of name binarize_col instead of filtering.
+    **cond_kwargs
+        Optional keyword arguments for processing the condition.
+
+    Examples
+    --------
+    >>> ConditionEquals("col1", 1)(table)
+    >>> ConditionEquals("col1", 1, binarize_col="col1_bool")(table)
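+
+    The condition can also be negated (values here are illustrative):
+
+    >>> ConditionEquals("col1", 1, not_=True)(table)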
+
+    """
+
+    def __init__(
+        self,
+        col: str,
+        value: typing.Any,
+        not_: bool = False,
+        binarize_col: typing.Optional[str] = None,
+        **cond_kwargs: typing.Any,
+    ) -> None:
+        super().__init__()
+        self.col = col
+        self.value = value
+        self.not_ = not_
+        self.binarize_col = binarize_col
+        self.cond_kwargs = cond_kwargs
+
+    def __call__(self, table: TableTypes, return_cond: bool = False) -> Subquery:
+        """Process the table.
+
+        Parameters
+        ----------
+        table
+            Table on which to perform the operation.
+        return_cond
+            Return the condition instead of filtering.
+
+        Returns
+        -------
+        sqlalchemy.sql.selectable.Subquery
+            Processed table.
+
+        """
+        if return_cond and self.binarize_col:
+            raise ValueError(
+                "Cannot return condition and binarize column simultaneously.",
+            )
+        table = _process_checks(table, cols=self.col, cols_not_in=self.binarize_col)
+        cond = equals(
+            get_column(table, self.col),
+            self.value,
+            True,
+            True,
+            **self.cond_kwargs,
+        )
+        if self.not_:
+            cond = cond._negate()
+        if return_cond:
+            return cond
+        if self.binarize_col is not None:
+            return select(
+                table,
+                cast(cond, Boolean).label(self.binarize_col),
+            ).subquery()
+
+        return select(table).where(cond).subquery()
+
+
+class ConditionGreaterThan(QueryOp):
+    """Filter rows based on being greater than (or equal to) some value.
+
+    Parameters
+    ----------
+    col
+        Column name on which to condition.
+    value
+        Value to compare against.
+    equal
+        Include equality to the value.
+    not_
+        Take negation of condition.
+    binarize_col
+        If specified, create a Boolean column of name binarize_col instead of filtering.
+    **cond_kwargs
+        Optional keyword arguments for processing the condition.
+
+    Examples
+    --------
+    >>> ConditionGreaterThan("col1", 1)(table)
+    >>> ConditionGreaterThan("col1", 1, binarize_col="col1_bool")(table)
+
+    """
+
+    def __init__(
+        self,
+        col: str,
+        value: typing.Any,
+        equal: bool = False,
+        not_: bool = False,
+        binarize_col: typing.Optional[str] = None,
+        **cond_kwargs: typing.Any,
+    ) -> None:
+        super().__init__()
+        self.col = col
+        self.value = value
+        self.equal = equal
+        self.not_ = not_
+        self.binarize_col = binarize_col
+        self.cond_kwargs = cond_kwargs
+
+    def __call__(self, table: TableTypes, return_cond: bool = False) -> Subquery:
+        """Process the table.
+
+        Parameters
+        ----------
+        table
+            Table on which to perform the operation.
+        return_cond
+            Return the condition instead of filtering.
+
+        Returns
+        -------
+        sqlalchemy.sql.selectable.Subquery
+            Processed table.
+
+        """
+        if return_cond and self.binarize_col:
+            raise ValueError(
+                "Cannot return condition and binarize column simultaneously.",
+            )
+        table = _process_checks(table, cols=self.col, cols_not_in=self.binarize_col)
+        cond = greater_than(
+            get_column(table, self.col),
+            self.value,
+            True,
+            True,
+            self.equal,
+            **self.cond_kwargs,
+        )
+        if self.not_:
+            cond = cond._negate()
+        if return_cond:
+            return cond
+        if self.binarize_col is not None:
+            return select(
+                table,
+                cast(cond, Boolean).label(self.binarize_col),
+            ).subquery()
+
+        return select(table).where(cond).subquery()
+
+
+class ConditionLessThan(QueryOp):
+    """Filter rows based on being less than (or equal to) some value.
+
+    Parameters
+    ----------
+    col
+        Column name on which to condition.
+    value
+        Value to compare against.
+    equal
+        Include equality to the value.
+ not_ + Take negation of condition. + binarize_col + If specified, create a Boolean column of name binarize_col instead of filtering. + **cond_kwargs + Optional keyword arguments for processing the condition. + + Examples + -------- + >>> ConditionLessThan("col1", 1)(table) + >>> ConditionLessThan("col1", 1, binarize_col="col1_bool")(table) + + """ + + def __init__( + self, + col: str, + value: typing.Any, + equal: bool = False, + not_: bool = False, + binarize_col: typing.Optional[str] = None, + **cond_kwargs: typing.Any, + ) -> None: + super().__init__() + self.col = col + self.value = value + self.equal = equal + self.not_ = not_ + self.binarize_col = binarize_col + self.cond_kwargs = cond_kwargs + + def __call__(self, table: TableTypes, return_cond: bool = False) -> Subquery: + """Process the table. + + Parameters + ---------- + table + Table on which to perform the operation. + return_cond + Return the condition instead of filtering. + + Returns + ------- + sqlalchemy.sql.selectable.Subquery + Processed table. + + """ + if return_cond and self.binarize_col: + raise ValueError( + "Cannot return condition and binarize column simultaneously.", + ) + table = _process_checks(table, cols=self.col, cols_not_in=self.binarize_col) + cond = less_than( + get_column(table, self.col), + self.value, + True, + True, + self.equal, + **self.cond_kwargs, + ) + if self.not_: + cond = cond._negate() + if return_cond: + return cond + if self.binarize_col is not None: + return select( + table, + cast(cond, Boolean).label(self.binarize_col), + ).subquery() + + return select(table).where(cond).subquery() + + +class ConditionRegexMatch(QueryOp): + """Filter rows based on matching a regular expression. + + Parameters + ---------- + col + Column name on which to condition. + regex + Regular expression to match. + not_ + Take negation of condition. + binarize_col + If specified, create a Boolean column of name binarize_col instead of filtering. + + Examples + -------- + >>> ConditionRegexMatch("col1", ".*")(table) + >>> ConditionRegexMatch("col1", ".*", binarize_col="col1_bool")(table) + + """ + + def __init__( + self, + col: str, + regex: str, + not_: bool = False, + binarize_col: typing.Optional[str] = None, + ): + super().__init__() + self.col = col + self.regex = regex + self.not_ = not_ + self.binarize_col = binarize_col + + def __call__(self, table: TableTypes, return_cond: bool = False) -> Subquery: + """Process the table. + + Parameters + ---------- + table + Table on which to perform the operation. + return_cond + Return the condition instead of filtering. + + Returns + ------- + sqlalchemy.sql.selectable.Subquery + Processed table. + + """ + if return_cond and self.binarize_col: + raise ValueError( + "Cannot return condition and binarize column simultaneously.", + ) + table = _process_checks(table, cols=self.col, cols_not_in=self.binarize_col) + cond = get_column(table, self.col).regexp_match(self.regex) + if self.not_: + cond = cond._negate() + if return_cond: + return cond + if self.binarize_col is not None: + return select( + table, + cast(cond, Boolean).label(self.binarize_col), + ).subquery() + + return select(table).where(cond).subquery() + + +class ConditionIn(QueryOp): + """Filter rows based on having a value in list of values. + + Parameters + ---------- + col + Column name on which to condition. + values + Values in which the column value must be. + not_ + Take negation of condition. + binarize_col + If specified, create a Boolean column of name binarize_col instead of filtering. 
+    **cond_kwargs
+        Optional keyword arguments for processing the condition.
+
+    Examples
+    --------
+    >>> ConditionIn("col1", [1, 2])(table)
+    >>> ConditionIn("col1", [1, 2], binarize_col="col1_bool")(table)
+
+    """
+
+    def __init__(
+        self,
+        col: str,
+        values: typing.Union[typing.Any, typing.List[typing.Any]],
+        not_: bool = False,
+        binarize_col: typing.Optional[str] = None,
+        **cond_kwargs: typing.Any,
+    ) -> None:
+        super().__init__()
+        self.col = col
+        self.values = values
+        self.not_ = not_
+        self.binarize_col = binarize_col
+        self.cond_kwargs = cond_kwargs
+
+    def __call__(self, table: TableTypes, return_cond: bool = False) -> Subquery:
+        """Process the table.
+
+        Parameters
+        ----------
+        table
+            Table on which to perform the operation.
+        return_cond
+            Return the condition instead of filtering.
+
+        Returns
+        -------
+        sqlalchemy.sql.selectable.Subquery
+            Processed table.
+
+        """
+        if return_cond and self.binarize_col:
+            raise ValueError(
+                "Cannot return condition and binarize column simultaneously.",
+            )
+        table = _process_checks(table, cols=self.col, cols_not_in=self.binarize_col)
+        cond = in_(
+            get_column(table, self.col),
+            to_list(self.values),
+            True,
+            True,
+            **self.cond_kwargs,
+        )
+        if self.not_:
+            cond = cond._negate()
+        if return_cond:
+            return cond
+        if self.binarize_col is not None:
+            return select(
+                table,
+                cast(cond, Boolean).label(self.binarize_col),
+            ).subquery()
+
+        return select(table).where(cond).subquery()
+
+
+class ConditionSubstring(QueryOp):
+    """Filter rows based on having substrings.
+
+    Whether the row must have any or all of the specified substrings can be
+    specified. This makes no difference when only one substring is provided.
+
+    Parameters
+    ----------
+    col
+        Column name on which to condition.
+    substrings
+        Substrings.
+    any_
+        If true, the row must have at least one of the substrings. If false,
+        it must have all of the substrings.
+    not_
+        Take negation of condition.
+    binarize_col
+        If specified, create a Boolean column of name binarize_col instead of filtering.
+    **cond_kwargs
+        Optional keyword arguments for processing the condition.
+
+    Examples
+    --------
+    >>> ConditionSubstring("col1", ["a", "b"])(table)
+    >>> ConditionSubstring("col1", ["a", "b"], any_=False)(table)
+    >>> ConditionSubstring("col1", ["a", "b"], binarize_col="col1_bool")(table)
+
+    """
+
+    def __init__(
+        self,
+        col: str,
+        substrings: typing.Union[str, typing.List[str]],
+        any_: bool = True,
+        not_: bool = False,
+        binarize_col: typing.Optional[str] = None,
+        **cond_kwargs: typing.Any,
+    ) -> None:
+        super().__init__()
+        self.col = col
+        self.substrings = to_list(substrings)
+        self.any_ = any_
+        self.not_ = not_
+        self.binarize_col = binarize_col
+        self.cond_kwargs = cond_kwargs
+
+    def __call__(self, table: TableTypes, return_cond: bool = False) -> Subquery:
+        """Process the table.
+
+        Parameters
+        ----------
+        table
+            Table on which to perform the operation.
+        return_cond
+            Return the condition instead of filtering.
+
+        Returns
+        -------
+        sqlalchemy.sql.selectable.Subquery
+            Processed table.
+
+        """
+        if return_cond and self.binarize_col:
+            raise ValueError(
+                "Cannot return condition and binarize column simultaneously.",
+            )
+        table = _process_checks(table, cols=self.col, cols_not_in=self.binarize_col)
+        conds = [
+            has_substring(get_column(table, self.col), sub, True, **self.cond_kwargs)
+            for sub in self.substrings
+        ]
+        cond = or_(*conds) if self.any_ else and_(*conds)
+        if self.not_:
+            cond = cond._negate()
+        if return_cond:
+            return cond
+        if self.binarize_col is not None:
+            return select(
+                table,
+                cast(cond, Boolean).label(self.binarize_col),
+            ).subquery()
+
+        return select(table).where(cond).subquery()
+
+
+class ConditionStartsWith(QueryOp):
+    """Filter rows based on starting with some string.
+
+    Parameters
+    ----------
+    col
+        Column name on which to condition.
+    string
+        String to start with.
+    not_
+        Take negation of condition.
+    binarize_col
+        If specified, create a Boolean column of name binarize_col instead of filtering.
+    **cond_kwargs
+        Optional keyword arguments for processing the condition.
+
+    Examples
+    --------
+    >>> ConditionStartsWith("col1", "a")(table)
+    >>> ConditionStartsWith("col1", "a", binarize_col="col1_bool")(table)
+
+    """
+
+    def __init__(
+        self,
+        col: str,
+        string: str,
+        not_: bool = False,
+        binarize_col: typing.Optional[str] = None,
+        **cond_kwargs: typing.Any,
+    ) -> None:
+        super().__init__()
+        self.col = col
+        self.string = string
+        self.not_ = not_
+        self.binarize_col = binarize_col
+        self.cond_kwargs = cond_kwargs
+
+    def __call__(self, table: TableTypes, return_cond: bool = False) -> Subquery:
+        """Process the table.
+
+        Parameters
+        ----------
+        table
+            Table on which to perform the operation.
+        return_cond
+            Return the condition instead of filtering.
+
+        Returns
+        -------
+        sqlalchemy.sql.selectable.Subquery
+            Processed table.
+
+        """
+        if return_cond and self.binarize_col:
+            raise ValueError(
+                "Cannot return condition and binarize column simultaneously.",
+            )
+        table = _process_checks(table, cols=self.col, cols_not_in=self.binarize_col)
+        cond = starts_with(
+            get_column(table, self.col),
+            self.string,
+            True,
+            True,
+            **self.cond_kwargs,
+        )
+        if self.not_:
+            cond = cond._negate()
+        if return_cond:
+            return cond
+        if self.binarize_col is not None:
+            return select(
+                table,
+                cast(cond, Boolean).label(self.binarize_col),
+            ).subquery()
+
+        return select(table).where(cond).subquery()
+
+
+class ConditionEndsWith(QueryOp):
+    """Filter rows based on ending with some string.
+
+    Parameters
+    ----------
+    col
+        Column name on which to condition.
+    string
+        String to end with.
+    not_
+        Take negation of condition.
+    binarize_col
+        If specified, create a Boolean column of name binarize_col instead of filtering.
+    **cond_kwargs
+        Optional keyword arguments for processing the condition.
+
+    Examples
+    --------
+    >>> ConditionEndsWith("col1", "a")(table)
+    >>> ConditionEndsWith("col1", "a", binarize_col="col1_bool")(table)
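+
+    As with the other string conditions, the match can be negated (values
+    here are illustrative):
+
+    >>> ConditionEndsWith("col1", "a", not_=True)(table)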
+
+    """
+
+    def __init__(
+        self,
+        col: str,
+        string: str,
+        not_: bool = False,
+        binarize_col: typing.Optional[str] = None,
+        **cond_kwargs: typing.Any,
+    ) -> None:
+        super().__init__()
+        self.col = col
+        self.string = string
+        self.not_ = not_
+        self.binarize_col = binarize_col
+        self.cond_kwargs = cond_kwargs
+
+    def __call__(self, table: TableTypes, return_cond: bool = False) -> Subquery:
+        """Process the table.
+
+        Parameters
+        ----------
+        table
+            Table on which to perform the operation.
+        return_cond
+            Return the condition instead of filtering.
+
+        Returns
+        -------
+        sqlalchemy.sql.selectable.Subquery
+            Processed table.
+
+        """
+        if return_cond and self.binarize_col:
+            raise ValueError(
+                "Cannot return condition and binarize column simultaneously.",
+            )
+        table = _process_checks(table, cols=self.col, cols_not_in=self.binarize_col)
+        cond = ends_with(
+            get_column(table, self.col),
+            self.string,
+            True,
+            True,
+            **self.cond_kwargs,
+        )
+        if self.not_:
+            cond = cond._negate()
+        if return_cond:
+            return cond
+        if self.binarize_col is not None:
+            return select(
+                table,
+                cast(cond, Boolean).label(self.binarize_col),
+            ).subquery()
+
+        return select(table).where(cond).subquery()
+
+
+class ConditionInYears(QueryOp):
+    """Filter rows based on a timestamp column being in a list of years.
+
+    Parameters
+    ----------
+    timestamp_col
+        Timestamp column name.
+    years
+        Years in which the timestamps must be.
+    not_
+        Take negation of condition.
+    binarize_col
+        If specified, create a Boolean column of name binarize_col instead of filtering.
+
+    Examples
+    --------
+    >>> ConditionInYears("col1", [2019, 2020])(table)
+    >>> ConditionInYears("col1", 2019)(table)
+    >>> ConditionInYears("col1", 2019, binarize_col="col1_bool")(table)
+
+    """
+
+    def __init__(
+        self,
+        timestamp_col: str,
+        years: typing.Union[int, typing.List[int]],
+        not_: bool = False,
+        binarize_col: typing.Optional[str] = None,
+    ):
+        super().__init__()
+        self.timestamp_col = timestamp_col
+        self.years = years
+        self.not_ = not_
+        self.binarize_col = binarize_col
+
+    def __call__(self, table: TableTypes, return_cond: bool = False) -> Subquery:
+        """Process the table.
+
+        Parameters
+        ----------
+        table
+            Table on which to perform the operation.
+        return_cond
+            Return the condition instead of filtering.
+
+        Returns
+        -------
+        sqlalchemy.sql.selectable.Subquery
+            Processed table.
+
+        """
+        if return_cond and self.binarize_col:
+            raise ValueError(
+                "Cannot return condition and binarize column simultaneously.",
+            )
+        table = _process_checks(
+            table,
+            cols=self.timestamp_col,
+            cols_not_in=self.binarize_col,
+        )
+        cond = in_(
+            extract("year", get_column(table, self.timestamp_col)),
+            to_list(self.years),
+        )
+        if self.not_:
+            cond = cond._negate()
+        if return_cond:
+            return cond
+        if self.binarize_col is not None:
+            return select(
+                table,
+                cast(cond, Boolean).label(self.binarize_col),
+            ).subquery()
+
+        return select(table).where(cond).subquery()
+
+
+class ConditionInMonths(QueryOp):
+    """Filter rows based on a timestamp being in a list of months.
+
+    Parameters
+    ----------
+    timestamp_col
+        Timestamp column name.
+    months
+        Months in which the timestamps must be.
+    not_
+        Take negation of condition.
+    binarize_col
+        If specified, create a Boolean column of name binarize_col instead of filtering.
+
+    Examples
+    --------
+    >>> ConditionInMonths("col1", [1, 2])(table)
+    >>> ConditionInMonths("col1", 1)(table)
+    >>> ConditionInMonths("col1", 1, binarize_col="col1_bool")(table)
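+
+    Months outside a given set can be excluded via negation (values here are
+    illustrative):
+
+    >>> ConditionInMonths("col1", [6, 7, 8], not_=True)(table)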
+
+    """
+
+    def __init__(
+        self,
+        timestamp_col: str,
+        months: typing.Union[int, typing.List[int]],
+        not_: bool = False,
+        binarize_col: typing.Optional[str] = None,
+    ):
+        super().__init__()
+        self.timestamp_col = timestamp_col
+        self.months = months
+        self.not_ = not_
+        self.binarize_col = binarize_col
+
+    def __call__(self, table: TableTypes, return_cond: bool = False) -> Subquery:
+        """Process the table.
+
+        Parameters
+        ----------
+        table
+            Table on which to perform the operation.
+        return_cond
+            Return the condition instead of filtering.
+
+        Returns
+        -------
+        sqlalchemy.sql.selectable.Subquery
+            Processed table.
+
+        """
+        if return_cond and self.binarize_col:
+            raise ValueError(
+                "Cannot return condition and binarize column simultaneously.",
+            )
+        table = _process_checks(
+            table,
+            cols=self.timestamp_col,
+            cols_not_in=self.binarize_col,
+        )
+        cond = in_(
+            extract("month", get_column(table, self.timestamp_col)),
+            to_list(self.months),
+        )
+        if self.not_:
+            cond = cond._negate()
+        if return_cond:
+            return cond
+        if self.binarize_col is not None:
+            return select(
+                table,
+                cast(cond, Boolean).label(self.binarize_col),
+            ).subquery()
+
+        return select(table).where(cond).subquery()
+
+
+class ConditionBeforeDate(QueryOp):
+    """Filter rows based on a timestamp being on or before some date.
+
+    Parameters
+    ----------
+    timestamp_col
+        Timestamp column name.
+    timestamp
+        A datetime object or str in YYYY-MM-DD format.
+    not_
+        Take negation of condition.
+    binarize_col
+        If specified, create a Boolean column of name binarize_col instead of filtering.
+
+    Examples
+    --------
+    >>> ConditionBeforeDate("col1", "2020-01-01")(table)
+    >>> ConditionBeforeDate("col1", datetime.datetime(2020, 1, 1))(table)
+    >>> ConditionBeforeDate("col1", "2020-01-01", binarize_col="col1_bool")(table)
+
+    """
+
+    def __init__(
+        self,
+        timestamp_col: str,
+        timestamp: typing.Union[str, datetime],
+        not_: bool = False,
+        binarize_col: typing.Optional[str] = None,
+    ):
+        super().__init__()
+        self.timestamp_col = timestamp_col
+        self.timestamp = timestamp
+        self.not_ = not_
+        self.binarize_col = binarize_col
+
+    def __call__(self, table: TableTypes, return_cond: bool = False) -> Subquery:
+        """Process the table.
+
+        Parameters
+        ----------
+        table
+            Table on which to perform the operation.
+        return_cond
+            Return the condition instead of filtering.
+
+        Returns
+        -------
+        sqlalchemy.sql.selectable.Subquery
+            Processed table.
+
+        """
+        if return_cond and self.binarize_col:
+            raise ValueError(
+                "Cannot return condition and binarize column simultaneously.",
+            )
+        table = _process_checks(table, timestamp_cols=self.timestamp_col)
+        if isinstance(self.timestamp, str):
+            timestamp = to_datetime_format(self.timestamp)
+        else:
+            timestamp = self.timestamp
+        cond = get_column(table, self.timestamp_col) <= timestamp
+        if self.not_:
+            cond = cond._negate()
+        if return_cond:
+            return cond
+        if self.binarize_col is not None:
+            return select(
+                table,
+                cast(cond, Boolean).label(self.binarize_col),
+            ).subquery()
+
+        return select(table).where(cond).subquery()
+
+
+class ConditionAfterDate(QueryOp):
+    """Filter rows based on a timestamp being on or after some date.
+
+    Parameters
+    ----------
+    timestamp_col
+        Timestamp column name.
+    timestamp
+        A datetime object or str in YYYY-MM-DD format.
+    not_
+        Take negation of condition.
+    binarize_col
+        If specified, create a Boolean column of name binarize_col instead of filtering.
+
+    Examples
+    --------
+    >>> ConditionAfterDate("col1", "2020-01-01")(table)
+    >>> ConditionAfterDate("col1", datetime.datetime(2020, 1, 1))(table)
+    >>> ConditionAfterDate("col1", "2020-01-01", binarize_col="col1_bool")(table)
+
+    """
+
+    def __init__(
+        self,
+        timestamp_col: str,
+        timestamp: typing.Union[str, datetime],
+        not_: bool = False,
+        binarize_col: typing.Optional[str] = None,
+    ):
+        super().__init__()
+        self.timestamp_col = timestamp_col
+        self.timestamp = timestamp
+        self.not_ = not_
+        self.binarize_col = binarize_col
+
+    def __call__(self, table: TableTypes, return_cond: bool = False) -> Subquery:
+        """Process the table.
+ + Parameters + ---------- + table + Table on which to perform the operation. + return_cond + Return the condition instead of filtering. + + Returns + ------- + sqlalchemy.sql.selectable.Subquery + Processed table. + + """ + if return_cond and self.binarize_col: + raise ValueError( + "Cannot return condition and binarize column simultaneously.", + ) + table = _process_checks(table, timestamp_cols=self.timestamp_col) + if isinstance(self.timestamp, str): + timestamp = to_datetime_format(self.timestamp) + else: + timestamp = self.timestamp + cond = get_column(table, self.timestamp_col) >= timestamp + if self.not_: + cond = cond._negate() + if return_cond: + return cond + if self.binarize_col is not None: + return select( + table, + cast(cond, Boolean).label(self.binarize_col), + ).subquery() + + return select(table).where(cond).subquery() + + +class ConditionLike(QueryOp): + """Filter rows by a LIKE condition. + + Parameters + ---------- + col + Column to filter on. + pattern + Pattern to filter on. + not_ + Take negation of condition. + binarize_col + If specified, create a Boolean column of name binarize_col instead of filtering. + + Examples + -------- + >>> ConditionLike("lab_name", "HbA1c")(table) + >>> ConditionLike("lab_name", "HbA1c", not_=True)(table) + >>> ConditionLike("lab_name", "HbA1c", binarize_col="lab_name_bool")(table) + + """ + + def __init__( + self, + col: str, + pattern: str, + not_: bool = False, + binarize_col: typing.Optional[str] = None, + ): + super().__init__() + self.col = col + self.pattern = pattern + self.not_ = not_ + self.binarize_col = binarize_col + + def __call__(self, table: TableTypes, return_cond: bool = False) -> Subquery: + """Process the table. + + Parameters + ---------- + table + Table on which to perform the operation. + return_cond + Return the condition instead of filtering. + + Returns + ------- + sqlalchemy.sql.selectable.Subquery + Processed table. + + """ + if return_cond and self.binarize_col: + raise ValueError( + "Cannot return condition and binarize column simultaneously.", + ) + table = _process_checks(table, cols=self.col) + cond = get_column(table, self.col).like(self.pattern) + if self.not_: + cond = cond._negate() + if return_cond: + return cond + if self.binarize_col is not None: + return select( + table, + cast(cond, Boolean).label(self.binarize_col), + ).subquery() + + return select(table).where(cond).subquery() + + +class Or(QueryOp): + """Combine multiple condition query ops using an OR. + + Parameters + ---------- + cond_ops + Condition Query ops to combine. + + Examples + -------- + >>> Or(ConditionLike("lab_name", "HbA1c"), ConditionIn("name", ["John", "Jane"])) + >>> Or([ConditionLike("lab_name", "HbA1c"), ConditionIn("name", ["John", "Jane"])]) + + """ + + def __init__(self, *cond_ops: typing.Union[QueryOp, typing.List[QueryOp]]): + super().__init__() + self.cond_ops = cond_ops + + def __call__(self, table: TableTypes, return_cond: bool = False) -> Subquery: + """Process the table. + + Parameters + ---------- + table + Table on which to perform the operation. + return_cond + Return the condition instead of filtering. + + Returns + ------- + sqlalchemy.sql.selectable.Subquery + Processed table. 
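+
+        Examples
+        --------
+        Illustrative usage only; assumes ``table`` has columns ``lab_name``
+        and ``name``:
+
+        >>> op = Or(ConditionLike("lab_name", "HbA1c"), ConditionIn("name", ["John"]))
+        >>> subquery = op(table)
+        >>> cond = op(table, return_cond=True)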
+
+        """
+        ops = []
+        for cond_op in self.cond_ops:
+            if isinstance(cond_op, list):
+                if len(self.cond_ops) != 1:
+                    raise ValueError("Cannot combine multiple lists of conditions.")
+                ops = [op(table, return_cond=True) for op in cond_op]
+            if isinstance(cond_op, QueryOp):
+                if len(self.cond_ops) == 1:
+                    return cond_op(table, return_cond=return_cond)
+                ops.append(cond_op(table, return_cond=True))
+        cond = or_(*ops)
+        if return_cond:
+            return cond
+
+        return select(table).where(cond).subquery()
+
+
+class And(QueryOp):
+    """Combine multiple condition query ops using an AND.
+
+    Parameters
+    ----------
+    cond_ops
+        Condition query ops to combine.
+
+    Examples
+    --------
+    >>> And([ConditionLike("lab_name", "HbA1c"), ConditionIn("name", ["John", "Jane"])])
+    >>> And(ConditionLike("lab_name", "HbA1c"), ConditionIn("name", ["John", "Jane"]))
+
+    """
+
+    def __init__(self, *cond_ops: typing.Union[QueryOp, typing.List[QueryOp]]):
+        super().__init__()
+        self.cond_ops = cond_ops
+
+    def __call__(self, table: TableTypes, return_cond: bool = False) -> Subquery:
+        """Process the table.
+
+        Parameters
+        ----------
+        table
+            Table on which to perform the operation.
+        return_cond
+            Return the condition instead of filtering.
+
+        Returns
+        -------
+        sqlalchemy.sql.selectable.Subquery
+            Processed table.
+
+        """
+        ops = []
+        for cond_op in self.cond_ops:
+            if isinstance(cond_op, list):
+                if len(self.cond_ops) != 1:
+                    raise ValueError("Cannot combine multiple lists of conditions.")
+                ops = [op(table, return_cond=True) for op in cond_op]
+            if isinstance(cond_op, QueryOp):
+                if len(self.cond_ops) == 1:
+                    return cond_op(table, return_cond=return_cond)
+                ops.append(cond_op(table, return_cond=True))
+        cond = and_(*ops)
+        if return_cond:
+            return cond
+
+        return select(table).where(cond).subquery()
+
+
+class Limit(QueryOp):
+    """Limit the number of rows returned in a query.
+
+    Parameters
+    ----------
+    number
+        Number of rows to return in the limit.
+
+    Examples
+    --------
+    >>> Limit(10)(table)
+
+    """
+
+    def __init__(self, number: int):
+        super().__init__()
+        self.number = number
+
+    @table_params_to_type(Select)
+    def __call__(self, table: TableTypes) -> Subquery:
+        """Process the table.
+
+        Parameters
+        ----------
+        table
+            Table on which to perform the operation.
+
+        Returns
+        -------
+        sqlalchemy.sql.selectable.Subquery
+            Processed table.
+
+        """
+        return table.limit(self.number).subquery()  # type: ignore
+
+
+class RandomizeOrder(QueryOp):
+    """Randomize order of table rows.
+
+    Useful when the data is ordered, since otherwise a limited query
+    always returns the same leading rows, and the rest can never be
+    seen or analyzed.
+
+    Examples
+    --------
+    >>> RandomizeOrder()(table)
+
+    Warnings
+    --------
+    Becomes quite slow on large tables.
+
+    """
+
+    @table_params_to_type(Subquery)
+    def __call__(self, table: TableTypes) -> Subquery:
+        """Process the table.
+
+        Parameters
+        ----------
+        table
+            Table on which to perform the operation.
+
+        Returns
+        -------
+        sqlalchemy.sql.selectable.Subquery
+            Processed table.
+
+        """
+        return select(table).order_by(func.random()).subquery()
+
+
+class DropNulls(QueryOp):
+    """Remove rows with null values in some specified columns.
+
+    Parameters
+    ----------
+    cols
+        Columns in which, if a value is null, the corresponding row is removed.
+
+    Examples
+    --------
+    >>> DropNulls("col1")(table)
+    >>> DropNulls(["col1", "col2"])(table)
+
+    """
+
+    def __init__(self, cols: typing.Union[str, typing.List[str]]):
+        super().__init__()
+        self.cols = cols
+
+    def __call__(self, table: TableTypes) -> Subquery:
+        """Process the table.
+
+        Parameters
+        ----------
+        table
+            Table on which to perform the operation.
+
+        Returns
+        -------
+        sqlalchemy.sql.selectable.Subquery
+            Processed table.
+
+        """
+        self.cols = to_list(self.cols)
+        table = _process_checks(table, cols=self.cols)
+
+        cond = and_(*[not_equals(get_column(table, col), None) for col in self.cols])
+        return select(table).where(cond).subquery()
+
+
+class DropEmpty(QueryOp):
+    """Remove rows with empty values in some specified columns.
+
+    Parameters
+    ----------
+    cols
+        Columns in which, if a value is empty, the corresponding row is removed.
+
+    Examples
+    --------
+    >>> DropEmpty("col1")(table)
+    >>> DropEmpty(["col1", "col2"])(table)
+
+    """
+
+    def __init__(self, cols: typing.Union[str, typing.List[str]]):
+        super().__init__()
+        self.cols = cols
+
+    def __call__(self, table: TableTypes) -> Subquery:
+        """Process the table.
+
+        Parameters
+        ----------
+        table
+            Table on which to perform the operation.
+
+        Returns
+        -------
+        sqlalchemy.sql.selectable.Subquery
+            Processed table.
+
+        """
+        self.cols = to_list(self.cols)
+        table = _process_checks(table, cols=self.cols)
+
+        cond = and_(*[not_equals(get_column(table, col), "") for col in self.cols])
+        return select(table).where(cond).subquery()
+
+
+class Apply(QueryOp):
+    """Apply function(s) to column(s).
+
+    A function takes a sqlalchemy column object and returns a column object.
+    It can also take multiple columns and return a single column or multiple
+    columns. If a list of functions is provided, each function is applied to
+    its corresponding input column.
+
+    Parameters
+    ----------
+    cols
+        Column(s) to apply the function to.
+    funcs
+        Function that takes in sqlalchemy column(s) and returns column(s)
+        after applying it, or a list of such functions, one per column.
+    new_cols
+        New column name(s) after function is applied to the specified column(s).
+
+    Examples
+    --------
+    >>> Apply("col1", lambda x: x + 1)(table)
+    >>> Apply(["col1", "col2"], [lambda x: x + 1, lambda x: x + 2])(table)
+    >>> Apply("col1", lambda x: x + 1, new_cols="col1_new")(table)
+    >>> Apply(["col1", "col2"], lambda x, y: x + y, new_cols="col1_new")(table)
+    >>> Apply(["col1", "col2"], lambda x, y: (x + y, x - y), new_cols=["col1_new", "col2_new"])(table)  # noqa: E501, pylint: disable=line-too-long
+
+    """
+
+    def __init__(
+        self,
+        cols: typing.Union[str, typing.List[str]],
+        funcs: typing.Union[
+            typing.Callable[
+                [sqlalchemy.sql.schema.Column],
+                sqlalchemy.sql.schema.Column,
+            ],
+            typing.List[
+                typing.Callable[
+                    [sqlalchemy.sql.schema.Column],
+                    sqlalchemy.sql.schema.Column,
+                ]
+            ],
+        ],
+        new_cols: typing.Optional[typing.Union[str, typing.List[str]]] = None,
+    ):
+        super().__init__()
+        self.cols = cols
+        self.funcs = funcs
+        self.new_cols = new_cols
+
+    def __call__(self, table: TableTypes) -> Subquery:
+        """Process the table.
+
+        Parameters
+        ----------
+        table
+            Table on which to perform the operation.
+
+        Returns
+        -------
+        sqlalchemy.sql.selectable.Subquery
+            Processed table.
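+
+        Examples
+        --------
+        A sketch, assuming ``table`` has numeric columns ``col1`` and ``col2``:
+
+        >>> Apply(["col1", "col2"], lambda x, y: x + y, new_cols="col_sum")(table)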
+
+        """
+        self.new_cols = to_list(self.new_cols)
+        if isinstance(self.funcs, list):
+            if len(self.funcs) != len(self.cols):
+                raise ValueError(
+                    "Number of functions must be equal to number of columns.",
+                )
+            if len(self.new_cols) != len(self.cols):
+                raise ValueError(
+                    "Number of new columns must be equal to number of columns.",
+                )
+        if callable(self.funcs):
+            cols = get_columns(table, self.cols)
+            result_cols = [
+                self.funcs(*cols).label(new_col) for new_col in self.new_cols
+            ]  # noqa: E501
+
+            return select(table).add_columns(*result_cols).subquery()
+
+        return apply_to_columns(table, self.cols, self.funcs, self.new_cols)
+
+
+class OrderBy(QueryOp):
+    """Order, or sort, the rows of a table by some columns.
+
+    Parameters
+    ----------
+    cols
+        Columns by which to order.
+    ascending
+        Whether to order each column by ascending (True) or descending (False).
+        If not provided, orders all by ascending.
+
+    Examples
+    --------
+    >>> OrderBy("col1")(table)
+    >>> OrderBy(["col1", "col2"])(table)
+    >>> OrderBy(["col1", "col2"], [True, False])(table)
+    >>> OrderBy(["col1", "col2"], True)(table)
+
+    """
+
+    def __init__(
+        self,
+        cols: typing.Union[str, typing.List[str]],
+        ascending: typing.Optional[typing.Union[bool, typing.List[bool]]] = None,
+    ):
+        super().__init__()
+        self.cols = cols
+        self.ascending = ascending
+
+    def __call__(self, table: TableTypes) -> Subquery:
+        """Process the table.
+
+        Parameters
+        ----------
+        table
+            Table on which to perform the operation.
+
+        Returns
+        -------
+        sqlalchemy.sql.selectable.Subquery
+            Processed table.
+
+        """
+        self.cols = to_list(self.cols)
+        ascending = to_list_optional(self.ascending)
+        table = _process_checks(table, cols=self.cols)
+        if ascending is None:
+            ascending = [True] * len(self.cols)
+        elif len(ascending) != len(self.cols):
+            raise ValueError(
+                "If ascending is specified, it must be specified for all columns.",
+            )
+        order_cols = [
+            col if ascending[i] else col.desc()
+            for i, col in enumerate(get_columns(table, self.cols))
+        ]
+
+        return select(table).order_by(*order_cols).subquery()
+
+
+class GroupByAggregate(QueryOp):
+    """Aggregate over a group by object.
+
+    Parameters
+    ----------
+    groupby_cols
+        Columns by which to group.
+    aggfuncs
+        Specify a dictionary of key-value pairs:
+        column name: aggfunc string or
+        column name: (aggfunc string, new column label)
+        This labelling prevents duplicate column names, e.g., when a column
+        in the group by is also being aggregated over.
+    aggseps
+        Specify a dictionary of key-value pairs:
+        column name: string_aggfunc separator
+        If string_agg used as aggfunc for a column, then a separator must be provided
+        for the same column.
+
+    Examples
+    --------
+    >>> GroupByAggregate("person_id", {"person_id": "count"})(table)
+    >>> GroupByAggregate("person_id", {"person_id": ("count", "visit_count")})(table)
+    >>> GroupByAggregate("person_id", {"lab_name": "string_agg"}, {"lab_name": ", "})(table)
+    >>> GroupByAggregate("person_id", {"lab_name": ("string_agg", "lab_name_agg")}, {"lab_name": ", "})(table)
+
+    """
+
+    def __init__(
+        self,
+        groupby_cols: typing.Union[str, typing.List[str]],
+        aggfuncs: typing.Union[
+            typing.Dict[str, typing.Sequence[str]],
+            typing.Dict[str, str],
+        ],
+        aggseps: typing.Optional[typing.Dict[str, str]] = None,
+    ):
+        super().__init__()
+        self.groupby_cols = groupby_cols
+        self.aggfuncs = aggfuncs
+        if aggseps is None:
+            aggseps = {}
+        self.aggseps = aggseps
+
+    def __call__(self, table: TableTypes) -> Subquery:
+        """Process the table.
+ + Parameters + ---------- + table + Table on which to perform the operation. + + Returns + ------- + sqlalchemy.sql.selectable.Subquery + Processed table. + + """ + str_to_aggfunc = { + "sum": func.sum, + "average": func.avg, + "min": func.min, + "max": func.max, + "count": func.count, + "median": func.percentile_cont(0.5).within_group, + "string_agg": func.string_agg, + } + + aggfunc_tuples = list(self.aggfuncs.items()) + aggfunc_cols = [item[0] for item in aggfunc_tuples] + aggfunc_strs = [ + item[1] if isinstance(item[1], str) else item[1][0] + for item in aggfunc_tuples + ] + + # If not specified, aggregate column names default to that of + # the column being aggregated over + aggfunc_names = [ + aggfunc_cols[i] if isinstance(item[1], str) else item[1][1] + for i, item in enumerate(aggfunc_tuples) + ] + + groupby_names = to_list(self.groupby_cols) + table = _process_checks(table, cols=groupby_names + aggfunc_cols) + + # Error checking + for i, aggfunc_str in enumerate(aggfunc_strs): + if aggfunc_str not in str_to_aggfunc: + allowed_strs = ", ".join(list(str_to_aggfunc.keys())) + raise ValueError( + f"Invalid aggfuncs specified. Allowed values are {allowed_strs}.", + ) + if aggfunc_str == "string_agg" and ( + not bool(self.aggseps) or aggfunc_cols[i] not in self.aggseps + ): + raise ValueError( + f"""Column {aggfunc_cols[i]} needs to be aggregated as string, must specify a separator!""", # noqa: E501 + ) + + all_names = groupby_names + aggfunc_names + if len(all_names) != len(set(all_names)): + raise ValueError( + """Duplicate column names were found. Try naming aggregated columns + to avoid this issue.""", + ) + + # Perform group by + groupby_cols = get_columns(table, groupby_names) + to_agg_cols = get_columns(table, aggfunc_cols) + agg_cols = [] + for i, to_agg_col in enumerate(to_agg_cols): + if aggfunc_strs[i] == "string_agg": + agg_col = str_to_aggfunc[aggfunc_strs[i]]( + to_agg_col, + literal_column(f"'{self.aggseps[aggfunc_cols[i]]}'"), + ) + else: + agg_col = str_to_aggfunc[aggfunc_strs[i]](to_agg_col) + agg_cols.append(agg_col.label(aggfunc_names[i])) + + return select(*groupby_cols, *agg_cols).group_by(*groupby_cols).subquery() + + +class Distinct(QueryOp): + """Get distinct rows. + + Parameters + ---------- + cols + Columns to use for distinct. + + Examples + -------- + >>> Distinct("person_id")(table) + >>> Distinct(["person_id", "visit_id"])(table) + + """ + + def __init__(self, cols: typing.Union[str, typing.List[str]]): + super().__init__() + self.cols = cols + + def __call__(self, table: TableTypes) -> Subquery: + """Process the table. + + Parameters + ---------- + table + Table on which to perform the operation. + + Returns + ------- + sqlalchemy.sql.selectable.Subquery + Processed table. 
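+
+        Examples
+        --------
+        Illustrative usage; assumes ``table`` has columns ``person_id``
+        and ``visit_id``:
+
+        >>> Distinct(["person_id", "visit_id"])(table)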
+
+        """
+        cols = to_list(self.cols)
+        table = _process_checks(table, cols=cols)
+
+        return select(table).distinct(*get_columns(table, cols)).subquery()
diff --git a/cycquery/orm.py b/cycquery/orm.py
new file mode 100644
index 0000000..c8abfae
--- /dev/null
+++ b/cycquery/orm.py
@@ -0,0 +1,313 @@
+"""Object Relational Mapper (ORM) using sqlalchemy."""
+
+import csv
+import logging
+import os
+import socket
+from dataclasses import dataclass
+from typing import Dict, List, Literal, Optional, Union
+from urllib.parse import quote_plus
+
+import dask.dataframe as dd
+import pandas as pd
+import pyarrow.csv as pv
+import pyarrow.parquet as pq
+from sqlalchemy import MetaData, create_engine, inspect
+from sqlalchemy.engine.base import Engine
+from sqlalchemy.orm import sessionmaker
+from sqlalchemy.orm.session import Session
+from sqlalchemy.sql.selectable import Select
+
+from cycquery.util import (
+    DBSchema,
+    DBTable,
+    TableTypes,
+    get_attr_name,
+    table_params_to_type,
+)
+from cycquery.utils.file import exchange_extension, process_file_save_path
+from cycquery.utils.log import setup_logging
+from cycquery.utils.profile import time_function
+
+
+# Logging.
+LOGGER = logging.getLogger(__name__)
+setup_logging(print_level="INFO", logger=LOGGER)
+
+
+SOCKET_CONNECTION_TIMEOUT = 5
+
+
+def _get_db_url(
+    dbms: str,
+    user: str,
+    pwd: str,
+    host: str,
+    port: int,
+    database: str,
+) -> str:
+    """Combine the connection parameters into a database URL string."""
+    return f"{dbms}://{user}:{quote_plus(pwd)}@{host}:{str(port)}/{database}"
+
+
+@dataclass
+class DatasetQuerierConfig:
+    """Configuration for the dataset querier.
+
+    Attributes
+    ----------
+    dbms
+        Database management system.
+    host
+        Hostname of database.
+    port
+        Port of database.
+    database
+        Name of database.
+    user
+        Username for database.
+    password
+        Password for database.
+
+    """
+
+    database: str
+    user: str
+    password: str
+    dbms: str = "postgresql"
+    host: str = "localhost"
+    port: int = 5432
+
+
+class Database:
+    """Database class.
+
+    Attributes
+    ----------
+    config
+        Configuration stored in a dataclass.
+    engine
+        SQL extraction engine.
+    inspector
+        Module for schema inspection.
+    session
+        Session for ORM.
+    is_connected
+        Whether the database is set up, connected and ready to run queries.
+
+    """
+
+    def __init__(self, config: DatasetQuerierConfig) -> None:
+        """Instantiate.
+
+        Parameters
+        ----------
+        config
+            Configuration dataclass with the database connection parameters.
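+
+        Examples
+        --------
+        A minimal sketch (connection values are placeholders):
+
+        >>> config = DatasetQuerierConfig(database="db", user="usr", password="pwd")
+        >>> db = Database(config)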
+
+        """
+        self.config = config
+        self.is_connected = False
+
+        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+        sock.settimeout(SOCKET_CONNECTION_TIMEOUT)
+        try:
+            # connect_ex returns 0 on success, so a nonzero value means
+            # the port could not be reached.
+            is_port_open = sock.connect_ex((self.config.host, self.config.port))
+        except socket.gaierror:
+            LOGGER.error("""Server name not known, cannot establish connection!""")
+            return
+        if is_port_open:
+            LOGGER.error(
+                """Valid server host but port seems closed, check if server is up!""",
+            )
+            return
+
+        self.engine = self._create_engine()
+        self.session = self._create_session()
+        self._tables: List[str] = []
+        self._setup()
+        self.is_connected = True
+        LOGGER.info("Database setup, ready to run queries!")
+
+    def _create_engine(self) -> Engine:
+        """Create an engine."""
+        self.conn = _get_db_url(
+            self.config.dbms,
+            self.config.user,
+            self.config.password,
+            self.config.host,
+            self.config.port,
+            self.config.database,
+        )
+        return create_engine(self.conn)
+
+    def _create_session(self) -> Session:
+        """Create session."""
+        self.inspector = inspect(self.engine)
+
+        # Create a session for using ORM.
+        session = sessionmaker(self.engine)
+        session.configure(bind=self.engine)
+
+        return session()
+
+    def list_tables(self) -> List[str]:
+        """List tables in a schema.
+
+        Returns
+        -------
+        List[str]
+            List of table names.
+
+        """
+        return self._tables
+
+    def _setup(self) -> None:
+        """Prepare ORM DB."""
+        meta: Dict[str, MetaData] = {}
+        schemas = self.inspector.get_schema_names()
+        for schema_name in schemas:
+            metadata = MetaData(schema=schema_name)
+            metadata.reflect(bind=self.engine)
+            meta[schema_name] = metadata
+            schema = DBSchema(schema_name, meta[schema_name])
+            for table_name in meta[schema_name].tables:
+                table = DBTable(table_name, meta[schema_name].tables[table_name])
+                for column in meta[schema_name].tables[table_name].columns:
+                    setattr(table, column.name, column)
+                if not isinstance(table.name, str):
+                    table.name = str(table.name)
+                self._tables.append(table.name)
+                setattr(schema, get_attr_name(table.name), table)
+            setattr(self, schema_name, schema)
+
+    @time_function
+    @table_params_to_type(Select)
+    def run_query(
+        self,
+        query: Union[TableTypes, str],
+        limit: Optional[int] = None,
+        backend: Literal["pandas", "dask", "datasets"] = "pandas",
+        index_col: Optional[str] = None,
+        n_partitions: Optional[int] = None,
+    ) -> Union[pd.DataFrame, dd.core.DataFrame]:
+        """Run query.
+
+        Parameters
+        ----------
+        query
+            Query to run.
+        limit
+            Limit the query result to this many rows.
+        backend
+            Backend library to use, Pandas or Dask or HF datasets.
+        index_col
+            Column which becomes the index, and defines the partitioning.
+            Should be an indexed column in the SQL server, and of an orderable type.
+        n_partitions
+            Number of partitions. Check dask documentation for additional details.
+
+        Returns
+        -------
+        pandas.DataFrame or dask.DataFrame
+            Extracted data from query.
+
+        """
+        if isinstance(query, str) and limit is not None:
+            raise ValueError(
+                "Cannot use limit argument when running raw SQL string query!",
+            )
+        if backend in ["pandas", "datasets"] and n_partitions is not None:
+            raise ValueError(
+                "Partitions not applicable with pandas or datasets backend, use dask!",
+            )
+        # Limit the results returned.
+        if limit is not None:
+            query = query.limit(limit)  # type: ignore
+
+        # Run the query and return the results.
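+        # Note: the dask backend reads through the database URL string rather
+        # than the engine, and dask requires index_col to define partitions.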
+        with self.session.connection():
+            if backend == "pandas":
+                data = pd.read_sql_query(query, self.engine, index_col=index_col)
+            elif backend == "dask":
+                data = dd.read_sql_query(  # type: ignore
+                    query,
+                    self.conn,
+                    index_col=index_col,
+                    npartitions=n_partitions,
+                )
+                data = data.reset_index(drop=False)
+            else:
+                raise ValueError(
+                    "Invalid backend, can either be pandas or dask!",
+                )
+        LOGGER.info("Query returned successfully!")
+
+        return data
+
+    @time_function
+    @table_params_to_type(Select)
+    def save_query_to_csv(self, query: TableTypes, path: str) -> str:
+        """Save query in a .csv format.
+
+        Parameters
+        ----------
+        query
+            Query to save.
+        path
+            Save path.
+
+        Returns
+        -------
+        str
+            Processed save path for upstream use.
+
+        """
+        path = process_file_save_path(path, "csv")
+
+        with self.session.connection():
+            result = self.engine.execute(query)
+            with open(path, "w", encoding="utf-8") as file_descriptor:
+                outcsv = csv.writer(file_descriptor)
+                outcsv.writerow(result.keys())
+                outcsv.writerows(result)
+
+        return path
+
+    @time_function
+    @table_params_to_type(Select)
+    def save_query_to_parquet(self, query: TableTypes, path: str) -> str:
+        """Save query in a .parquet format.
+
+        Parameters
+        ----------
+        query
+            Query to save.
+        path
+            Save path.
+
+        Returns
+        -------
+        str
+            Processed save path for upstream use.
+
+        """
+        path = process_file_save_path(path, "parquet")
+
+        # Save to CSV, load with pyarrow, save to Parquet
+        csv_path = exchange_extension(path, "csv")
+        self.save_query_to_csv(query, csv_path)
+        table = pv.read_csv(csv_path)
+        os.remove(csv_path)
+        pq.write_table(table, path)
+
+        return path
diff --git a/cycquery/post_process/__init__.py b/cycquery/post_process/__init__.py
new file mode 100644
index 0000000..958c2f8
--- /dev/null
+++ b/cycquery/post_process/__init__.py
@@ -0,0 +1 @@
+"""Query post-process functions for datasets."""
diff --git a/cycquery/post_process/gemini.py b/cycquery/post_process/gemini.py
new file mode 100644
index 0000000..6fa5e16
--- /dev/null
+++ b/cycquery/post_process/gemini.py
@@ -0,0 +1,44 @@
+"""Post-processing functions applied to queried GEMINI data (Pandas DataFrames)."""
+
+import pandas as pd
+
+from cycquery.post_process.util import process_care_unit_changepoints
+
+
+CARE_UNIT_HIERARCHY = [
+    "ER",
+    "Emergency",
+    "ICU",
+    "SCU",
+    "Peri-op",
+    "Palliative",
+    "Step-down",
+    "Rehab",
+    "Other ward",
+    "GIM ward",
+    "IP",
+]
+
+
+def process_gemini_care_unit_changepoints(data: pd.DataFrame) -> pd.DataFrame:
+    """Process GEMINI changepoint care unit information in a hierarchical fashion.
+
+    Using the admit, discharge, and care unit information, create a
+    changepoint DataFrame usable for aggregation labelling purposes.
+    If a patient is in multiple care units at a changepoint, the care
+    unit highest in the hierarchy is selected.
+
+    Parameters
+    ----------
+    data: pandas.DataFrame
+        The admit, discharge, and care unit information for a single encounter.
+        Expects columns "admit", "discharge", and CARE_UNIT.
+
+    Returns
+    -------
+    pandas.DataFrame
+        Changepoint information with associated care unit.
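+
+    Examples
+    --------
+    A sketch, assuming ``encounter_df`` holds one encounter's care unit rows:
+
+    >>> changepoints = process_gemini_care_unit_changepoints(encounter_df)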
+ + """ + return process_care_unit_changepoints(data, CARE_UNIT_HIERARCHY) diff --git a/cycquery/post_process/mimiciv.py b/cycquery/post_process/mimiciv.py new file mode 100644 index 0000000..b0f7d8c --- /dev/null +++ b/cycquery/post_process/mimiciv.py @@ -0,0 +1,155 @@ +"""Post-processing functions applied to queried MIMIC data (Pandas DataFrames).""" + +import pandas as pd + +from cycquery.post_process.util import process_care_unit_changepoints +from cycquery.utils.profile import time_function + + +CARE_UNIT = "care_unit" +ER = "ER" +ICU = "ICU" +IP = "IP" +SCU = "SCU" +CARE_UNIT_MAP = { + IP: { + "observation": ["Observation", "Psychiatry"], + "medicine": ["Medicine", "Medical/Surgical (Gynecology)"], + }, + ER: { + "er": ["Emergency Department", "Emergency Department Observation"], + }, + ICU: { + "icu": [ + "Surgical Intensive Care Unit (SICU)", + "Medical/Surgical Intensive Care Unit (MICU/SICU)", + "Medical Intensive Care Unit (MICU)", + "Trauma SICU (TSICU)", + "Neuro Surgical Intensive Care Unit (Neuro SICU)", + "Cardiac Vascular Intensive Care Unit (CVICU)", + ], + }, + SCU: { + "surgery": [ + "Med/Surg", + "Surgery", + "Surgery/Trauma", + "Med/Surg/Trauma", + "Med/Surg/GYN", + "Surgery/Vascular/Intermediate", + "Thoracic Surgery", + "Transplant", + "Cardiac Surgery", + "PACU", + "Surgery/Pancreatic/Biliary/Bariatric", + ], + "cardiology": [ + "Cardiology", + "Coronary Care Unit (CCU)", + "Cardiology Surgery Intermediate", + "Medicine/Cardiology", + "Medicine/Cardiology Intermediate", + ], + "vascular": [ + "Vascular", + "Hematology/Oncology", + "Hematology/Oncology Intermediate", + ], + "neuro": ["Neurology", "Neuro Intermediate", "Neuro Stepdown"], + "neonatal": [ + "Obstetrics (Postpartum & Antepartum)", + "Neonatal Intensive Care Unit (NICU)", + "Special Care Nursery (SCN)", + "Nursery - Well Babies", + "Obstetrics Antepartum", + "Obstetrics Postpartum", + "Labor & Delivery", + ], + }, +} +NONSPECIFIC_CARE_UNIT_MAP = { + "medicine": IP, + "observation": IP, + "er": ER, + "icu": ICU, + "cardiology": SCU, + "neuro": SCU, + "neonatal": SCU, + "surgery": SCU, + "vascular": SCU, +} +CARE_UNIT_HIERARCHY = [ER, ICU, SCU, IP] + + +def process_mimic_care_unit_changepoints(data: pd.DataFrame) -> pd.DataFrame: + """Process MIMIC changepoint care unit information in a hierarchical fashion. + + Using the admit, discharge, and care unit information, create a + changepoint DataFrame usable for aggregation labelling purposes. + If a patient is in multiple care units at a changepoint, the care + unit highest in the hierarchy is selected. + + Parameters + ---------- + data: pandas.DataFrame + The admit, discharge, and care unit information for a single encounter. + Expects columns "admit", "discharge", and CARE_UNIT. + + Returns + ------- + pandas.DataFrame + Changepoint information with associated care unit. + + """ + return process_care_unit_changepoints(data, CARE_UNIT_HIERARCHY) + + +@time_function +def process_mimic_care_units( + transfers: pd.DataFrame, + specific: bool = False, +) -> pd.DataFrame: + """Process care unit data. + + Processes the MIMIC Transfers table into a cleaned and simplified care + units DataFrame. + + Parameters + ---------- + transfers : pandas.DataFrame + MIMIC transfers table as a DataFrame. + specific : bool, optional + Whether care_unit_name column has specific or non-specific care units. + + Returns + ------- + pandas.DataFrame + Processed care units for MIMIC encounters. 
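+
+    Examples
+    --------
+    A sketch, assuming ``transfers`` was queried from the MIMIC-IV transfers table:
+
+    >>> care_units = process_mimic_care_units(transfers, specific=False)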
+
+    """
+    transfers.rename(
+        columns={
+            "intime": "admit",
+            "outtime": "discharge",
+            "careunit": CARE_UNIT,
+        },
+        inplace=True,
+    )
+
+    # Drop rows with eventtype discharge.
+    # Its admit timestamp is the discharge timestamp of eventtype admit.
+    transfers = transfers[transfers["eventtype"] != "discharge"]
+    transfers = transfers.drop("eventtype", axis=1)
+    transfers = transfers[transfers[CARE_UNIT] != "Unknown"]
+
+    # Create replacement dictionary for care unit categories depending on specificity.
+    replace_dict = {}
+    for unit, unit_dict in CARE_UNIT_MAP.items():
+        for specific_unit, unit_list in unit_dict.items():
+            value = specific_unit if specific else unit
+            replace_dict.update({elem: value for elem in unit_list})
+    transfers[CARE_UNIT].replace(replace_dict, inplace=True)
+
+    transfers.dropna(inplace=True)
+
+    return transfers
diff --git a/cycquery/post_process/util.py b/cycquery/post_process/util.py
new file mode 100644
index 0000000..824ed01
--- /dev/null
+++ b/cycquery/post_process/util.py
@@ -0,0 +1,112 @@
+"""Post-processing functions applied to queried data (Pandas DataFrames)."""
+
+from typing import List
+
+import pandas as pd
+from pandas import Timestamp
+
+
+CARE_UNIT = "care_unit"
+
+
+def event_time_between(
+    event: Timestamp,
+    admit: pd.Series,
+    discharge: pd.Series,
+    admit_inclusive: bool = True,
+    discharge_inclusive: bool = False,
+) -> pd.Series:
+    """Return whether an event time is between some start and end time.
+
+    May also specify whether the comparison operators are inclusive or not.
+
+    Parameters
+    ----------
+    event: pandas.Timestamp
+        Event time.
+    admit: pandas.Series
+        A series of timestamps.
+    discharge: pandas.Series
+        A series of timestamps.
+    admit_inclusive: bool
+        Whether to have an inclusive inequality for the admit condition.
+    discharge_inclusive: bool
+        Whether to have an inclusive inequality for the discharge condition.
+
+    Returns
+    -------
+    pandas.Series
+        A boolean Series representing whether the event is between
+        the start and end timestamps.
+
+    """
+    admit_cond = event >= admit if admit_inclusive else event > admit
+
+    discharge_cond = event <= discharge if discharge_inclusive else event < discharge
+
+    return admit_cond & discharge_cond
+
+
+def process_care_unit_changepoints(
+    data: pd.DataFrame,
+    care_unit_hierarchy: List[str],
+) -> pd.DataFrame:
+    """Process changepoint care unit information in a hierarchical fashion.
+
+    Using the admit, discharge, and care unit information, create a
+    changepoint DataFrame usable for aggregation labelling purposes.
+    If a patient is in multiple care units at a changepoint, the care
+    unit highest in the hierarchy is selected.
+
+    Parameters
+    ----------
+    data: pandas.DataFrame
+        The admit, discharge, and care unit information for a single encounter.
+        Expects columns "admit", "discharge", and CARE_UNIT.
+    care_unit_hierarchy: list
+        Ordered list of care units from most relevant to least.
+
+    Returns
+    -------
+    pandas.DataFrame
+        Changepoint information with associated care unit. The care unit
+        information is relevant up until the next change point.
+
+    """
+    # Define mapping dictionaries
+    hierarchy = {care_unit_hierarchy[i]: i for i in range(len(care_unit_hierarchy))}
+    hierarchy_inv = {i: care_unit_hierarchy[i] for i in range(len(care_unit_hierarchy))}
+
+    # Create changepoints
+    changepoints = pd.concat([data["admit"], data["discharge"]])
+    changepoints.sort_values(inplace=True)
+    changepoints = changepoints.unique()
+
+    # Remove the final changepoint, which is the final discharge (has no care unit)
+    changepoints = changepoints[:-1]
+
+    # Select the most relevant care unit for each changepoint
+    changepoint_data = []
+    for changepoint in changepoints:
+        is_between = event_time_between(
+            changepoint,
+            data["admit"],
+            data["discharge"],
+            admit_inclusive=True,
+            discharge_inclusive=False,
+        )
+        care_units = data[is_between][CARE_UNIT].unique()
+        if len(care_units) > 0:
+            care_unit_inds = [hierarchy[x] for x in care_units]
+            care_unit_selected = hierarchy_inv[min(care_unit_inds)]
+        else:
+            care_unit_selected = "unknown"
+        changepoint_data.append([changepoint, care_unit_selected])
+
+    checkpoint_df = pd.DataFrame(changepoint_data, columns=["changepoint", "care_unit"])
+
+    # Remove consecutive duplicates, i.e., remove a changepoint if the
+    # previous changepoint has the same care unit
+    change_mask = checkpoint_df["care_unit"] != checkpoint_df["care_unit"].shift(1)
+
+    return checkpoint_df[change_mask]
diff --git a/cycquery/util.py b/cycquery/util.py
new file mode 100644
index 0000000..a246298
--- /dev/null
+++ b/cycquery/util.py
@@ -0,0 +1,1229 @@
+"""Utility functions for querying."""
+
+import logging
+from dataclasses import dataclass
+from functools import wraps
+from typing import Any, Callable, Dict, List, Optional, Union
+
+import sqlalchemy
+from sqlalchemy import cast, func, select
+from sqlalchemy.sql.elements import BinaryExpression
+from sqlalchemy.sql.expression import ColumnClause
+from sqlalchemy.sql.schema import Column, Table
+from sqlalchemy.sql.selectable import Select, Subquery
+from sqlalchemy.types import Boolean, Date, DateTime, Float, Integer, Interval, String
+
+from cycquery.utils.common import to_list, to_list_optional
+from cycquery.utils.log import setup_logging
+
+
+# Logging.
+LOGGER = logging.getLogger(__name__)
+setup_logging(print_level="INFO", logger=LOGGER)
+
+COLUMN_OBJECTS = [Column, ColumnClause]
+
+
+def get_attr_name(name: str) -> str:
+    """Get attribute name (second part of first.second)."""
+    return name.split(".")[-1]
+
+
+@dataclass
+class DBSchema:
+    """Database schema wrapper.
+
+    Parameters
+    ----------
+    name: str
+        Name of schema.
+    data: sqlalchemy.sql.schema.MetaData
+        Metadata for schema.
+
+    """
+
+    name: str
+    data: sqlalchemy.sql.schema.MetaData
+
+
+@dataclass
+class DBTable:
+    """Database table wrapper.
+
+    Parameters
+    ----------
+    name: str
+        Name of table.
+    data: sqlalchemy.sql.schema.Table
+        Metadata for table.
+
+    """
+
+    name: str
+    data: sqlalchemy.sql.schema.MetaData
+
+
+TABLE_OBJECTS = [Table, Select, Subquery, DBTable]
+TableTypes = Union[Select, Subquery, Table, DBTable]
+
+
+def _to_subquery(table: TableTypes) -> Subquery:
+    """Convert a table from a table type object to the Subquery type.
+
+    Parameters
+    ----------
+    table: cyclops.query.util.TableTypes
+        Table to convert.
+
+    Returns
+    -------
+    sqlalchemy.sql.selectable.Subquery
+        The converted table.
+
+    """
+    if isinstance(table, Subquery):
+        return table
+
+    if isinstance(table, Select):
+        return table.subquery()
+
+    if isinstance(table, Table):
+        return select(table).subquery()
+
+    if isinstance(table, DBTable):
+        return select(table.data).subquery()
+
+    raise ValueError(
+        f"""Table has type {type(table)}, but must have one of the
+        following types: {", ".join(map(str, TABLE_OBJECTS))}""",
+    )
+
+
+def _to_select(table: TableTypes) -> Select:
+    """Convert a table from a table type object to the Select type.
+
+    Parameters
+    ----------
+    table: cyclops.query.util.TableTypes
+        Table to convert.
+
+    Returns
+    -------
+    sqlalchemy.sql.selectable.Select
+        The converted table.
+
+    """
+    if isinstance(table, Select):
+        return table
+
+    if isinstance(table, Subquery):
+        return select(table)
+
+    if isinstance(table, Table):
+        return select(table)
+
+    if isinstance(table, DBTable):
+        return select(table.data)
+
+    raise ValueError(
+        f"""Table has type {type(table)}, but must have one of the
+        following types: {", ".join(map(str, TABLE_OBJECTS))}""",
+    )
+
+
+def param_types_to_type(
+    relevant_types: List[Any],
+    to_type_fn: Callable[..., Any],
+) -> Callable[..., Any]:
+    """Convert TableTypes parameters to a specified type.
+
+    A decorator which processes a function's arguments by taking all
+    parameters with type in relevant_types and converting them using
+    some to_type_fn function. Non-relevant types are left alone.
+
+    Parameters
+    ----------
+    relevant_types : list
+        Types to process.
+    to_type_fn : Callable
+        Function to process the relevant types.
+
+    Returns
+    -------
+    Callable
+        The processed function.
+
+    """
+
+    def decorator(func_: Callable[..., Any]) -> Callable[..., Any]:
+        """Decorate function to convert TableTypes parameters to a specified type."""
+
+        @wraps(func_)
+        def wrapper_func(*args: Any, **kwargs: Any) -> Any:
+            # Convert relevant arguments.
+            args_list = list(args)
+            for i, arg in enumerate(args_list):
+                if type(arg) in relevant_types:
+                    args_list[i] = to_type_fn(arg)
+
+            # Convert relevant keyword arguments.
+            kwargs = dict(kwargs)
+            for key, kwarg in kwargs.items():
+                if type(kwarg) in relevant_types:
+                    kwargs[key] = to_type_fn(kwarg)
+
+            return func_(*tuple(args_list), **kwargs)
+
+        return wrapper_func
+
+    return decorator
+
+
+def table_params_to_type(to_type: TableTypes) -> Callable[..., Any]:
+    """Decorate to convert TableTypes params to a specified type.
+
+    Parameters
+    ----------
+    to_type: cyclops.query.util.TableTypes
+        The type to which to convert.
+
+    Returns
+    -------
+    Callable
+        The processed function.
+
+    """
+    # Dictionary mapping query type -> query type conversion function.
+    table_to_type_fn_map = {
+        Subquery: _to_subquery,
+        Select: _to_select,
+        Table: lambda x: x,
+        DBTable: lambda x: x,
+    }
+    if to_type not in TABLE_OBJECTS:
+        raise ValueError(f"to_type must be in {TABLE_OBJECTS}")
+
+    to_type_fn = table_to_type_fn_map[to_type]
+
+    return param_types_to_type(TABLE_OBJECTS, to_type_fn)
+
+
+@table_params_to_type(Subquery)
+def get_column(
+    table: TableTypes,
+    col: str,
+) -> Column:
+    """Extract a column object from a table by name.
+
+    Parameters
+    ----------
+    table: cyclops.query.util.TableTypes
+        The table with the column.
+    col: str
+        Name of column to extract.
+
+    Returns
+    -------
+    sqlalchemy.sql.schema.Column
+        The corresponding column in the table.
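+
+    Examples
+    --------
+    Illustrative usage; assumes ``table`` has a column named ``person_id``:
+
+    >>> col = get_column(table, "person_id")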
+ + """ + col_names = get_column_names(table) + if col not in col_names: + raise ValueError(f"Table does not contain column {col}") + + return table.c[col_names.index(col)] # type: ignore + + +@table_params_to_type(Subquery) +def filter_columns( + table: TableTypes, + cols: Union[str, List[str]], +) -> Subquery: + """Filter a table, keeping only the specified columns. + + Parameters + ---------- + table: cyclops.query.util.TableTypes + The table with the column. + cols: str or list of str + Name of columns to keep. + + Returns + ------- + sqlalchemy.sql.selectable.Subquery + Table with only the specified columns. + + """ + cols = to_list(cols) + col_names = get_column_names(table) + filtered = [] + for col in cols: + if col not in col_names: + continue + filtered.append(table.c[col_names.index(col)]) # type: ignore + + return select(filtered).subquery() + + +@table_params_to_type(Subquery) +def get_columns( + table: TableTypes, + cols: Union[str, List[str]], +) -> List[Column]: + """Extract a number of columns from the table. + + Parameters + ---------- + table: cyclops.query.util.TableTypes + The table. + cols: str or list of str + Names of columns to extract. + + Returns + ------- + list of sqlalchemy.sql.schema.Column + The corresponding columns in the table. + + """ + return [get_column(table, col) for col in to_list(cols)] + + +@table_params_to_type(Subquery) +def get_column_names(table: TableTypes) -> List[str]: + """Extract column names from a table. + + Parameters + ---------- + table: cyclops.query.util.TableTypes + The table. + + Returns + ------- + list of str + The table column names. + + """ + return [c.name for c in table.columns] # type: ignore + + +@table_params_to_type(Subquery) +def has_columns( + table: TableTypes, + cols: Union[str, List[str]], + raise_error: bool = False, +) -> bool: + """Check whether a table has all of the specified columns. + + Parameters + ---------- + table : cyclops.query.util.TableTypes + Table to check. + cols: str or list of str + Required columns. + raise_error: bool + Whether to raise an error if the required columns are not found. + + Returns + ------- + bool + True if all required columns are present, otherwise False. + + """ + cols = to_list(cols) + required_set = set(cols) + columns = set(get_column_names(table)) + present = required_set.issubset(columns) + + if raise_error and not present: + missing = required_set - columns + raise ValueError(f"Missing required columns {', '.join(missing)}.") + + return present + + +@table_params_to_type(Subquery) +def assert_table_has_columns( + *args: Any, + **kwargs: Any, +) -> Callable[[TableTypes], TableTypes]: + """Assert that TableTypes params have the necessary columns. + + assert_table_has_columns(["A", "B"], None) is equivalent to + assert_table_has_columns(["A", "B"]) but may be necessary when + wanting to check, assert_table_has_columns(["A"], None, ["C"]) + + Can also check keyword arguments, e.g., optional queries, + assert_table_has_columns(["A"], kwarg_table=["D"]) + + Parameters + ---------- + *args + Ordered arguments corresponding to the function's table-type args. + **kwargs + Keyword arguments corresponding to the function's table-type kwargs. + + Returns + ------- + Callable + Decorator function. 
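+
+    Examples
+    --------
+    A sketch of intended usage (``process`` is a hypothetical function
+    taking two table arguments):
+
+    >>> @assert_table_has_columns(["person_id"], None)
+    ... def process(table_a, table_b):
+    ...     ...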
+
+    """
+
+    def decorator(
+        func_: Callable[..., Any],
+    ) -> Callable[..., Any]:
+        @wraps(func_)
+        def wrapper_func(*fn_args: Any, **fn_kwargs: Any) -> Any:
+            # Check only the table arguments
+            table_args = [i for i in fn_args if isinstance(i, Subquery)]
+
+            assert len(args) <= len(table_args)
+
+            for i, arg in enumerate(args):
+                if arg is None:  # Can specify None to skip over checking a query
+                    continue
+                has_columns(table_args[i], arg, raise_error=True)
+
+            for key, required_cols in kwargs.items():
+                # If an optional table is not provided, or is None,
+                # it is skipped
+                if key not in fn_kwargs:
+                    continue
+
+                if fn_kwargs[key] is None:
+                    continue
+
+                assert isinstance(fn_kwargs[key], Subquery)
+                has_columns(fn_kwargs[key], required_cols, raise_error=True)
+
+            return func_(*fn_args, **fn_kwargs)
+
+        return wrapper_func
+
+    return decorator  # type: ignore
+
+
+@table_params_to_type(Subquery)
+def drop_columns(
+    table: TableTypes,
+    drop_cols: Union[str, List[str]],
+) -> Subquery:
+    """Drop, or remove, some columns from a table.
+
+    Parameters
+    ----------
+    table: cyclops.query.util.TableTypes
+        The table.
+    drop_cols: str or list of str
+        Names of columns to drop.
+
+    Returns
+    -------
+    sqlalchemy.sql.selectable.Subquery
+        The corresponding table with columns dropped.
+
+    """
+    drop_cols = get_columns(table, drop_cols)
+
+    return select(*[c for c in table.c if c not in drop_cols]).subquery()  # type: ignore
+
+
+@table_params_to_type(Subquery)
+def rename_columns(table: TableTypes, rename_map: Dict[str, str]) -> Subquery:
+    """Rename a table's columns.
+
+    Rename the table's columns according to a dictionary of strings,
+    where the key is the current name, and the value is the replacement.
+
+    Parameters
+    ----------
+    table: cyclops.query.util.TableTypes
+        The table.
+    rename_map : dict
+        Dictionary mapping current column names (key) to new ones (value).
+
+    Returns
+    -------
+    sqlalchemy.sql.selectable.Subquery
+        The corresponding table with columns renamed.
+
+    """
+    return select(
+        *[
+            c.label(rename_map[c.name]) if c.name in rename_map else c
+            for c in table.columns  # type: ignore
+        ],
+    ).subquery()
+
+
+@table_params_to_type(Subquery)
+def reorder_columns(table: TableTypes, cols: List[str]) -> Subquery:
+    """Reorder a table's columns.
+
+    Parameters
+    ----------
+    table: cyclops.query.util.TableTypes
+        The table to reorder.
+    cols : list of str
+        New order of columns, which must include all existing columns.
+
+    Returns
+    -------
+    sqlalchemy.sql.selectable.Subquery
+        The reordered table.
+
+    """
+    # Get the old/new column names.
+    old_order = get_column_names(table)
+    new_order = [c.name for c in get_columns(table, cols)]
+
+    # Make sure we have exactly the same set of old/new column names.
+    if set(old_order) != set(new_order):
+        old_order_print = ", ".join(old_order)
+        new_order_print = ", ".join(new_order)
+        raise ValueError(
+            f"""Must specify all columns {old_order_print}
+            to re-order, not {new_order_print}.""",
+        )
+
+    # Reorder the columns.
+ new_cols = [] + for col in new_order: + new_cols.append(table.c[old_order.index(col)]) # type: ignore + + return select(*new_cols).subquery() + + +@table_params_to_type(Subquery) +def apply_to_columns( + table: TableTypes, + col_names: Union[str, List[str]], + funcs: Union[ + Callable[[sqlalchemy.sql.schema.Column], sqlalchemy.sql.schema.Column], + List[Callable[[sqlalchemy.sql.schema.Column], sqlalchemy.sql.schema.Column]], + ], + new_col_labels: Optional[Union[str, List[str]]] = None, +) -> Subquery: + """Apply a function to some columns. + + This function can change existing columns or create new + columns depending on whether new_col_labels is specified. + + Parameters + ---------- + table: cyclops.query.util.TableTypes + The table. + col_names: str or list of str + Columns to which to apply the function. + funcs: callable or list of callable + Function(s) to apply to the columns, where the function takes an column + as its only parameter and returns another column object. + new_col_labels: str or list of str, optional + If specified, create new columns with these labels. Otherwise, + apply the function to the existing columns. + + Returns + ------- + sqlalchemy.sql.selectable.Subquery + The table with function applied. + + """ + col_names = to_list(col_names) + new_col_labels = to_list_optional(new_col_labels) + cols = get_columns(table, col_names) + if isinstance(funcs, list): + if len(funcs) != len(cols): + raise ValueError( + f"Must specify a function for each column, not {len(funcs)} functions.", + ) + else: + funcs = [funcs] * len(cols) + if new_col_labels is None: + # Apply to existing columns + prev_order = get_column_names(table) + table = select(table).add_columns( + *[ + funcs[i](col).label("__" + col_names[i] + "__") + for i, col in enumerate(cols) + ], + ) + rename = {"__" + name + "__": name for name in col_names} + table = drop_columns(table, col_names) + table = rename_columns(table, rename) + table = reorder_columns(table, prev_order) + else: + # Apply to new columns + new_cols = [ + funcs[i](col).label(new_col_labels[i]) for i, col in enumerate(cols) + ] + table = select(table).add_columns(*new_cols) + + return _to_subquery(table) + + +def trim_columns( + table: TableTypes, + cols: Union[str, List[str]], + new_col_labels: Optional[Union[str, List[str]]] = None, +) -> Subquery: + """Trim, or strip, specified columns. + + Trimming refers to the removal of leading/trailing whitespace. + + Parameters + ---------- + table: cyclops.query.util.TableTypes + The table. + cols: str or list of str + Names of columns to trim. + new_col_labels: str or list of str, optional + If specified, create new columns with these labels. Otherwise, + apply the function to the existing columns. + + Returns + ------- + sqlalchemy.sql.selectable.Subquery + The table with the specified columns trimmed. + + """ + return apply_to_columns( + table, + cols, + lambda x: process_column(x, to_str=True, trim=True), + new_col_labels=new_col_labels, + ) + + +def process_elem(elem: Any, **kwargs: bool) -> Any: + """Preprocess some basic object such as an integer, float, or string. + + Parameters + ---------- + elem: any + An element such as an integer, float, or string. + **kwargs : dict, optional + Preprocessing keyword arguments. + + Returns + ------- + Any + The preprocessed element. + + """ + # Extract kwargs. 
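+    # Supported flags: lower, trim, to_str, to_int, to_float, to_bool.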
+ lower = kwargs.get("lower", False) + trim = kwargs.get("trim", False) + to_str = kwargs.get("to_str", False) + to_int = kwargs.get("to_int", False) + to_float = kwargs.get("to_float", False) + to_bool = kwargs.get("to_bool", False) + + # Convert to string. + if to_str: + elem = str(elem) + + # If a string. + if isinstance(elem, str): + if lower: + elem = elem.lower() + + if trim: + elem = elem.strip() + + if to_int: + elem = int(elem) + + if to_float: + elem = float(elem) + + if to_bool: + elem = bool(elem) + + return elem + + +def process_list(lst: Union[Any, List[Any]], **kwargs: bool) -> List[Any]: + """Preprocess a list of elements. + + Parameters + ---------- + lst : any or list of any + A list of elements such as integers, floats, or strings. + **kwargs : dict, optional + Preprocessing keyword arguments. + + Returns + ------- + Any + The preprocessed element. + + """ + # Convert potentially non-list variable to list. + lst = to_list(lst) + + # Process elements. + return [process_elem(i, **kwargs) for i in lst] + + +def process_column(col: Column, **kwargs: bool) -> Column: + """Preprocess a Column object. + + Parameters + ---------- + col : sqlalchemy.sql.schema.Column + A column to preprocess. + **kwargs : dict, optional + Preprocessing keyword arguments. + + Returns + ------- + sqlalchemy.sql.schema.Column + The processed column. + + """ + # Extract kwargs. + lower = kwargs.get("lower", False) + trim = kwargs.get("trim", False) + to_str = kwargs.get("to_str", False) + to_int = kwargs.get("to_int", False) + to_float = kwargs.get("to_float", False) + to_bool = kwargs.get("to_bool", False) + to_date = kwargs.get("to_date", False) + to_timestamp = kwargs.get("to_timestamp", False) + + # Convert to string. + if to_str: + col = cast(col, String) + + # If a string column. + if "VARCHAR" in str(col.type): + # Lower column. + if lower: + col = func.lower(col) + + # Trim whitespace. + if trim: + col = func.trim(col) + + if to_int: + col = cast(col, Integer) + + if to_float: + col = cast(col, Float) + + if to_bool: + col = cast(col, Boolean) + + if to_date: + col = cast(col, Date) + + if to_timestamp: + col = cast(col, DateTime) + + return col + + +def equals( + col: Column, + value: Any, + lower: bool = True, + trim: bool = True, + **kwargs: bool, +) -> BinaryExpression: + """Condition that a column has some value. + + Assumes that if searching for a string, both the value and column values + should be converted to lowercase and trimmed of leading/trailing whitespace. + + Parameters + ---------- + col : sqlalchemy.sql.schema.Column + The column to condition. + value : Any + The value to match in the column. + lower : bool, default=True + Whether to convert the value and column to lowercase. + This is only relevant when the column/value are strings. + trim : bool, default=True + Whether to trim (strip) whitespace on the value and column. + This is only relevant when the column/value are strings. + **kwargs : dict, optional + Remaining preprocessing keyword arguments. + + Returns + ------- + sqlalchemy.sql.elements.BinaryExpression + An expression representing where the condition was satisfied. + + """ + return process_column(col, lower=lower, trim=trim, **kwargs) == process_elem( + value, + lower=lower, + trim=trim, + **kwargs, + ) + + +def greater_than( + col: Column, + value: Any, + lower: bool = True, + trim: bool = True, + equal: bool = False, + **kwargs: bool, +) -> BinaryExpression: + """Condition that a column is greater than some value. 
+ + Assumes that if searching for a string, both the value and column values + should be converted to lowercase and trimmed of leading/trailing whitespace. + + Parameters + ---------- + col : sqlalchemy.sql.schema.Column + The column to condition. + value : Any + The value to match in the column. + lower : bool, default=True + Whether to convert the value and column to lowercase. + This is only relevant when the column/value are strings. + trim : bool, default=True + Whether to trim (strip) whitespace on the value and column. + This is only relevant when the column/value are strings. + equal: bool, default=False + Whether to also include equal to the value. + **kwargs : dict, optional + Remaining preprocessing keyword arguments. + + Returns + ------- + sqlalchemy.sql.elements.BinaryExpression + An expression representing where the condition was satisfied. + + """ + if equal: + return process_column(col, lower=lower, trim=trim, **kwargs) >= process_elem( + value, + lower=lower, + trim=trim, + **kwargs, + ) + return process_column(col, lower=lower, trim=trim, **kwargs) > process_elem( + value, + lower=lower, + trim=trim, + **kwargs, + ) + + +def less_than( + col: Column, + value: Any, + lower: bool = True, + trim: bool = True, + equal: bool = False, + **kwargs: bool, +) -> BinaryExpression: + """Condition that a column is less than some value. + + Assumes that if searching for a string, both the value and column values + should be converted to lowercase and trimmed of leading/trailing whitespace. + + Parameters + ---------- + col : sqlalchemy.sql.schema.Column + The column to condition. + value : Any + The value to match in the column. + lower : bool, default=True + Whether to convert the value and column to lowercase. + This is only relevant when the column/value are strings. + trim : bool, default=True + Whether to trim (strip) whitespace on the value and column. + This is only relevant when the column/value are strings. + equal: bool, default=False + Whether to also include equal to the value. + **kwargs : dict, optional + Remaining preprocessing keyword arguments. + + Returns + ------- + sqlalchemy.sql.elements.BinaryExpression + An expression representing where the condition was satisfied. + + """ + if equal: + return process_column(col, lower=lower, trim=trim, **kwargs) <= process_elem( + value, + lower=lower, + trim=trim, + **kwargs, + ) + return process_column(col, lower=lower, trim=trim, **kwargs) < process_elem( + value, + lower=lower, + trim=trim, + **kwargs, + ) + + +def not_equals( + col: Column, + value: Any, + lower: bool = True, + trim: bool = True, + **kwargs: bool, +) -> BinaryExpression: + """Condition that a column is not equal to some value. + + Assumes that if searching for a string, both the value and column values + should be converted to lowercase and trimmed of leading/trailing whitespace. + + Parameters + ---------- + col : sqlalchemy.sql.schema.Column + The column to condition. + value : Any + The value to match in the column. + lower : bool, default=True + Whether to convert the value and column to lowercase. + This is only relevant when the column/value are strings. + trim : bool, default=True + Whether to trim (strip) whitespace on the value and column. + This is only relevant when the column/value are strings. + **kwargs : dict, optional + Remaining preprocessing keyword arguments. + + Returns + ------- + sqlalchemy.sql.elements.BinaryExpression + An expression representing where the condition was satisfied. 
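+
+    Examples
+    --------
+    Illustrative usage; assumes ``table`` has a string column ``name``:
+
+    >>> cond = not_equals(get_column(table, "name"), "John")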
+ + """ + return process_column(col, lower=lower, trim=trim, **kwargs) != process_elem( + value, + lower=lower, + trim=trim, + **kwargs, + ) + + +def has_string_format( + col: Column, + value: Any, + fmt: str, + to_str: bool = True, + **kwargs: bool, +) -> BinaryExpression: + """Condition that a column has some string formatting. + + Assumes that we're searching for a string, performing + the relevant conversion. + + Parameters + ---------- + col : sqlalchemy.sql.schema.Column + The column to condition. + value: Any + A value to be implanted in the string formatting. + fmt : str + The string format to match in the column. + to_str : bool, default=True + Whether to convert the value/column to string type. + **kwargs : dict, optional + Remaining preprocessing keyword arguments. + + Returns + ------- + sqlalchemy.sql.elements.BinaryExpression + An expression representing where the condition was satisfied. + + """ + return process_column(col, to_str=to_str, **kwargs).like( + fmt.format(process_elem(value, to_str=to_str, **kwargs)), + ) + + +def has_substring( + col: Column, + substring: Any, + lower: bool = True, + **kwargs: bool, +) -> BinaryExpression: + """Condition that a column has some substring. + + Assumes that we're searching for a string, where both the value and + column values should be converted to strings and made lowercase. + + Parameters + ---------- + col : sqlalchemy.sql.schema.Column + The column to condition. + substring : Any + The substring to match in the column. + lower : bool, default=True + Whether to convert the value and column to lowercase. + This is only relevant when the column/value are strings. + **kwargs : dict, optional + Remaining preprocessing keyword arguments. + + Returns + ------- + sqlalchemy.sql.elements.BinaryExpression + An expression representing where the condition was satisfied. + + """ + return has_string_format(col, substring, "%%{}%%", lower=lower, **kwargs) + + +def starts_with( + col: Column, + value: Any, + lower: bool = True, + trim: bool = True, + **kwargs: bool, +) -> BinaryExpression: + """Condition that a column starts with some value/string. + + Assumes that we're searching for a string, where both the value and + column values should be converted to strings, made lowercase, and + trimmed of leading/trailing whitespace. + + Parameters + ---------- + col : sqlalchemy.sql.schema.Column + The column to condition. + value : Any + The value to match at the start. + lower : bool, default=True + Whether to convert the value and column to lowercase. + This is only relevant when the column/value are strings. + trim : bool, default=True + Whether to trim (strip) whitespace on the value and column. + This is only relevant when the column/value are strings. + **kwargs : dict, optional + Remaining preprocessing keyword arguments. + + Returns + ------- + sqlalchemy.sql.elements.BinaryExpression + An expression representing where the condition was satisfied. + + """ + return has_string_format(col, value, "{}%%", lower=lower, trim=trim, **kwargs) + + +def ends_with( + col: Column, + value: Any, + lower: bool = True, + trim: bool = True, + **kwargs: bool, +) -> BinaryExpression: + """Condition that a column ends with some value/string. + + Assumes that we're searching for a string, where both the value and + column values should be converted to strings, made lowercase, and + trimmed of leading/trailing whitespace. + + Parameters + ---------- + col : sqlalchemy.sql.schema.Column + The column to condition. + value : Any + The value to match at the end. 
+    lower : bool, default=True
+        Whether to convert the value and column to lowercase.
+        This is only relevant when the column/value are strings.
+    trim : bool, default=True
+        Whether to trim (strip) whitespace on the value and column.
+        This is only relevant when the column/value are strings.
+    **kwargs : dict, optional
+        Remaining preprocessing keyword arguments.
+
+    Returns
+    -------
+    sqlalchemy.sql.elements.BinaryExpression
+        An expression representing where the condition was satisfied.
+
+    """
+    return has_string_format(col, value, "%%{}", lower=lower, trim=trim, **kwargs)
+
+
+def in_(
+    col: Column,
+    lst: List[Any],
+    lower: bool = True,
+    trim: bool = True,
+    **kwargs: bool,
+) -> BinaryExpression:
+    """Condition that a column value is in a list of values.
+
+    Assumes that if searching for a string, both the value and column values
+    should be converted to lowercase and trimmed of leading/trailing whitespace.
+
+    Parameters
+    ----------
+    col : sqlalchemy.sql.schema.Column
+        The column to condition.
+    lst : list of any
+        The list of values to match in the column.
+    lower : bool, default=True
+        Whether to convert the value and column to lowercase.
+        This is only relevant when the column/value are strings.
+    trim : bool, default=True
+        Whether to trim (strip) whitespace on the value and column.
+        This is only relevant when the column/value are strings.
+    **kwargs : dict, optional
+        Remaining preprocessing keyword arguments.
+
+    Returns
+    -------
+    sqlalchemy.sql.elements.BinaryExpression
+        An expression representing where the condition was satisfied.
+
+    """
+    return process_column(col, lower=lower, trim=trim, **kwargs).in_(
+        process_list(lst, lower=lower, trim=trim, **kwargs),
+    )
+
+
+def _check_column_type(
+    table: TableTypes,
+    cols: Union[str, List[str]],
+    types: Union[Any, List[Any]],
+    raise_error: bool = False,
+) -> bool:
+    """Check whether some columns are each one of a number of types.
+
+    Parameters
+    ----------
+    table: cyclops.query.util.TableTypes
+        The table.
+    cols: str or list of str
+        Column names to check.
+    types: any or list of any
+        The allowed types for each column.
+    raise_error: bool
+        Whether to raise an error if any column is not one of the types.
+
+    Returns
+    -------
+    bool
+        Whether all of the columns are one of the types.
+
+    """
+    cols = to_list(cols)
+    types = to_list(types)
+    is_type = [
+        any(isinstance(get_column(table, col).type, type_) for type_ in types)
+        for col in cols
+    ]
+    if raise_error and not all(is_type):
+        incorrect_cols = list(
+            set(cols) - {col for i, col in enumerate(cols) if is_type[i]},
+        )
+        types_str = ", ".join([type_.__name__ for type_ in types])
+        # Report the actual SQL types of the offending columns, not the
+        # Python type of their names.
+        actual_types_str = [
+            type(get_column(table, col).type).__name__ for col in incorrect_cols
+        ]
+        raise ValueError(
+            f"{incorrect_cols} columns are not one of types {types_str}. "
+            f"They have types {actual_types_str}.",
+        )
+
+    return all(is_type)
+
+
+def check_timestamp_columns(
+    table: TableTypes,
+    cols: Union[str, List[str]],
+    raise_error: bool = False,
+) -> bool:
+    """Check whether some columns are Date or DateTime columns.
+
+    Parameters
+    ----------
+    table: cyclops.query.util.TableTypes
+        The table.
+    cols: str or list of str
+        Column names to check.
+    raise_error: bool
+        Whether to raise an error if any column is not one of the types.
+
+    Returns
+    -------
+    bool
+        Whether all of the columns are Date or DateTime columns.
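+
+    Examples
+    --------
+    A minimal sketch; ``visits`` is an assumed table with a ``DateTime``
+    ``admit_ts`` column and a string ``note`` column::
+
+        check_timestamp_columns(visits, "admit_ts")  # True
+        check_timestamp_columns(visits, ["admit_ts", "note"])  # False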
+ + """ + return _check_column_type(table, cols, [Date, DateTime], raise_error=raise_error) + + +@table_params_to_type(Subquery) +def get_delta_column( + table: TableTypes, + years: Optional[str] = None, + months: Optional[str] = None, + days: Optional[str] = None, + hours: Optional[str] = None, +) -> Column: + """Create a time delta column. + + Create a time delta (interval) column from a number of + numeric timestamp columns. + + Warning: Null values in each specified numeric time column are coalesced to 0. + + Parameters + ---------- + table: cyclops.query.util.TableTypes + The table. + years: None or str + Years column. + months: None or str + Months column. + days: None or str + Days column. + hours: None or str + Hours column. + + Returns + ------- + sqlalchemy.sql.schema.Column + Combined delta/interval column. + + """ + + def get_col_or_none(col: Optional[str] = None) -> Optional[Column]: + """If col is not None, get interval column from names.""" + return None if col is None else get_column(table, col) + + years = get_col_or_none(years) + months = get_col_or_none(months) + days = get_col_or_none(days) + hours = get_col_or_none(hours) + + time_cols = [years, months, days, hours] + names = ["YEARS", "MONTHS", "DAYS", "HOURS"] + + # Consider only the non-null columns. + names = [names[i] for i in range(len(names)) if time_cols[i] is not None] + time_cols = [col for col in time_cols if col is not None] + + if len(time_cols) == 0: + raise ValueError("One or more time interval columns must be specified.") + + # Create interval columns. + interval_cols = [] + for i, col in enumerate(time_cols): + interval_cols.append( + func.cast(func.concat(func.coalesce(col, 0), " " + names[i]), Interval), + ) + + # Create combined interval column. + combined_interval_col = interval_cols[0] + for i in range(1, len(interval_cols)): + combined_interval_col = combined_interval_col + interval_cols[i] + + return combined_interval_col diff --git a/cycquery/utils/__init__.py b/cycquery/utils/__init__.py new file mode 100644 index 0000000..88c371e --- /dev/null +++ b/cycquery/utils/__init__.py @@ -0,0 +1 @@ +"""Utility package.""" diff --git a/cycquery/utils/common.py b/cycquery/utils/common.py new file mode 100644 index 0000000..af93240 --- /dev/null +++ b/cycquery/utils/common.py @@ -0,0 +1,203 @@ +"""Common utility functions that can be used across multiple cyclops packages.""" + +import warnings +from datetime import datetime +from typing import Any, List, Optional, Union + +import numpy as np +import numpy.typing as npt +import pandas as pd +from pandas.errors import PerformanceWarning + + +def to_timestamp(data: Union[pd.Series, npt.NDArray[Any]]) -> pd.Series: + """Convert a Pandas series or NumPy array to a datetime/timestamp type. + + Parameters + ---------- + data: pandas.Series or numpy.ndarray + Data to be converted. + + Returns + ------- + pandas.Series + The converted data. + + """ + if isinstance(data, pd.Series): + return pd.to_datetime(data) + + if isinstance(data, np.ndarray): + return pd.to_datetime(pd.Series(data)) + + raise ValueError(f"Type of data argument ({type(data)}) not supported.") + + +def add_years_approximate( + timestamp_series: pd.Series, + years_series: pd.Series, +) -> pd.Series: + """Approximately add together a timestamp series with a years series row-by-row. + + Approximates are typically exact or incorrect by one day, e.g., on leap days. + + Parameters + ---------- + timestamp_series: pandas.Series + The series of timestamps to which to add. 
+    years_series: pandas.Series
+        The series of years to add.
+
+    Returns
+    -------
+    pandas.Series
+        The timestamp series with the approximately added years.
+
+    """
+    # Add to the years column
+    year = timestamp_series.dt.year + years_series
+
+    # Handle the other columns
+    month = timestamp_series.dt.month
+    day = timestamp_series.dt.day
+    hour = timestamp_series.dt.hour
+    minute = timestamp_series.dt.minute
+
+    # Create new timestamp column
+    data = pd.DataFrame(
+        {"year": year, "month": month, "day": day, "hour": hour, "minute": minute},
+    )
+
+    # Subtract 1 from potentially invalid leap days to avoid issues.
+    # Use .loc to avoid chained assignment.
+    leap_days = (month == 2) & (day == 29)
+    data.loc[leap_days, "day"] -= 1
+
+    return pd.to_datetime(data)
+
+
+def add_years_exact(timestamp_series: pd.Series, years_series: pd.Series) -> pd.Series:
+    """Add together a timestamp series with a years series row-by-row.
+
+    Warning: Very slow. It is worth using the add_years_approximate function for
+    even moderately large data.
+
+    Parameters
+    ----------
+    timestamp_series: pandas.Series
+        The series of timestamps to which to add.
+    years_series: pandas.Series
+        The series of years to add.
+
+    Returns
+    -------
+    pandas.Series
+        The timestamp series with the exactly added years.
+
+    """
+    warnings.warn(
+        (
+            "Computing the exact addition cannot be vectorized and is very slow. "
+            "Consider using the quick, approximate calculation."
+        ),
+        PerformanceWarning,
+        stacklevel=1,
+    )
+    return timestamp_series + years_series.apply(lambda x: pd.DateOffset(years=x))
+
+
+def to_list(obj: Any) -> List[Any]:
+    """Convert some object to a list of object(s) unless already one.
+
+    Parameters
+    ----------
+    obj : any
+        The object to convert to a list.
+
+    Returns
+    -------
+    list
+        The processed object.
+
+    """
+    if isinstance(obj, list):
+        return obj
+
+    if isinstance(obj, (np.ndarray, set, dict)):
+        return list(obj)
+
+    return [obj]
+
+
+def to_list_optional(
+    obj: Optional[Any],
+    none_to_empty: bool = False,
+) -> Union[List[Any], None]:
+    """Convert some object to a list of object(s) unless already None or a list.
+
+    Parameters
+    ----------
+    obj : any
+        The object to convert to a list.
+    none_to_empty: bool, default = False
+        If True, return a None obj as an empty list. Otherwise, return as None.
+
+    Returns
+    -------
+    list or None
+        The processed object.
+
+    """
+    if obj is None:
+        if none_to_empty:
+            return []
+        return None
+
+    return to_list(obj)
+
+
+def to_datetime_format(date: str, fmt: str = "%Y-%m-%d") -> datetime:
+    """Convert string date to datetime.
+
+    Parameters
+    ----------
+    date: str
+        Input date in string format.
+    fmt: str, optional
+        Date formatting string.
+
+    Returns
+    -------
+    datetime
+        Date in datetime format.
+
+    """
+    return datetime.strptime(date, fmt)
+
+
+def list_swap(lst: List[Any], index1: int, index2: int) -> List[Any]:
+    """Swap two items in a list given their indices.
+
+    Parameters
+    ----------
+    lst: list
+        List in which elements will be swapped.
+    index1: int
+        Index of first item to swap.
+    index2: int
+        Index of second item to swap.
+
+    Returns
+    -------
+    list
+        List with elements swapped.
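+
+    Examples
+    --------
+    For example, swapping the first and last items:
+
+    >>> list_swap(["a", "b", "c"], 0, 2)
+    ['c', 'b', 'a']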
+ + """ + if not 0 <= index1 < len(lst): + raise ValueError("index 1 is out of range.") + + if not 0 <= index2 < len(lst): + raise ValueError("index 2 is out of range.") + + lst[index1], lst[index2] = lst[index2], lst[index1] + + return lst diff --git a/cycquery/utils/file.py b/cycquery/utils/file.py new file mode 100644 index 0000000..597920a --- /dev/null +++ b/cycquery/utils/file.py @@ -0,0 +1,457 @@ +"""Utility functions for saving/loading files.""" + +import logging +import os +import pickle +from typing import Any, Generator, List, Optional, Union + +import dask.dataframe as dd +import numpy as np +import pandas as pd + +from cycquery.utils.log import setup_logging + + +# Logging. +LOGGER = logging.getLogger(__name__) +setup_logging(print_level="INFO", logger=LOGGER) + + +def join(*paths: str) -> str: + """Robustly join paths. + + os.path.join only may cause problems with some filepaths (especially on Windows). + + Parameters + ---------- + paths: str + file paths + + Returns + ------- + str + The joined path of all input paths. + + """ + return os.path.join(*paths).replace("\\", "/") + + +def exchange_extension(file_path: str, new_ext: str) -> str: + """Exchange one file extension for another. + + Parameters + ---------- + file_path: str + File path in which to exchange the extension. + new_ext: str + New extension to replace the existing extension. + + Returns + ------- + str + File path with the new extension. + + """ + # Remove a leading dot + new_ext = new_ext.strip(".") + _, old_ext = os.path.splitext(file_path) + return file_path[: -len(old_ext)] + "." + new_ext + + +def process_file_save_path( + save_path: str, + file_format: str, + create_dir: bool = True, +) -> str: + """Process file save path, perform checks, and possibly create a parent directory. + + Parameters + ---------- + save_path: str + Path where the file will be saved. + file_format: str + File format of the file to save. + create_dir: bool + If True, create the parent directory path if needed. + + Returns + ------- + str + The processed save path. + + """ + # Create the directory if it doesn't already exist. + directory, _ = os.path.split(save_path) + + if create_dir and directory != "": + os.makedirs(directory, exist_ok=True) + + # Add the .parquet extension if it isn't there. + _, ext = os.path.splitext(save_path) + + if ext == "": + save_path = save_path + "." + file_format + elif ext != "." + file_format: + raise ValueError( + f"""The file extension on the save path must be {file_format}. + Alternatively, sesider changing the file format.""", + ) + + return save_path + + +def process_dir_save_path(save_path: str, create_dir: bool = True) -> str: + """Process directory save path, perform checks, and possibly create the directory. + + Parameters + ---------- + save_path: str + Path where the file will be saved. + create_dir: bool + If True, create the directory if needed. + + Returns + ------- + str + The processed save path. + + """ + if os.path.exists(save_path): + if os.path.isdir(save_path): + return save_path + raise ValueError("If save path exists, it must be a directory.") + + if create_dir: + os.makedirs(save_path) + return save_path + + raise ValueError("Directory does not exist.") + + +def save_dataframe( + data: Union[pd.DataFrame, dd.core.DataFrame], + save_path: str, + file_format: str = "parquet", + log: bool = True, +) -> str: + """Save a pandas.DataFrame or dask.DataFrame object to file. + + Parameters + ---------- + data: pandas.DataFrame + Dataframe to save. 
+    save_path: str
+        Path where the file will be saved.
+    file_format: str
+        File format of the file to save.
+    log: bool
+        Whether to log the occurrence.
+
+    Returns
+    -------
+    str
+        Processed save path for upstream use.
+
+    """
+    if not isinstance(data, (pd.DataFrame, dd.core.DataFrame)):
+        raise ValueError("Input data is not a DataFrame.")
+    save_path = process_file_save_path(save_path, file_format)
+    if isinstance(data, dd.core.DataFrame):
+        save_path, _ = os.path.splitext(save_path)
+    if log:
+        LOGGER.info("Saving dataframe to %s", save_path)
+    if file_format == "parquet":
+        if isinstance(data, pd.DataFrame):
+            data.to_parquet(save_path, schema=None)
+        if isinstance(data, dd.core.DataFrame):
+            data.to_parquet(  # type: ignore
+                save_path,
+                schema=None,
+                name_function=lambda x: f"batch-{str(x).zfill(3)}.parquet",
+            )
+    elif file_format == "csv":
+        data.to_csv(save_path)
+    else:
+        raise ValueError(
+            "Invalid file format provided. Currently supporting 'parquet' and 'csv'.",
+        )
+
+    return save_path
+
+
+def load_dataframe(
+    load_path: str,
+    file_format: str = "parquet",
+    log: bool = True,
+) -> Union[pd.DataFrame, dd.core.DataFrame]:
+    """Load a file into a pandas.DataFrame or dask.DataFrame object.
+
+    Parameters
+    ----------
+    load_path: str
+        Path of the file to load.
+    file_format: str
+        File format of the file to load.
+    log: bool
+        Whether to log the occurrence.
+
+    Returns
+    -------
+    pandas.DataFrame or dask.DataFrame
+        Loaded data.
+
+    """
+    is_dask = True
+    if not os.path.isdir(load_path):
+        load_path = process_file_save_path(load_path, file_format)
+        is_dask = False
+    if log:
+        LOGGER.info("Loading DataFrame from %s", load_path)
+    if file_format == "parquet":
+        data_reader = dd.read_parquet if is_dask else pd.read_parquet  # type: ignore
+        data = data_reader(load_path)
+    elif file_format == "csv":
+        data = pd.read_csv(load_path, index_col=[0])
+    else:
+        raise ValueError(
+            "Invalid file format provided. Currently supporting 'parquet' and 'csv'.",
+        )
+
+    return data
+
+
+def save_array(
+    data: np.typing.ArrayLike,
+    save_path: str,
+    file_format: str = "npy",
+    log: bool = True,
+) -> str:
+    """Save a numpy.ndarray object to file.
+
+    Parameters
+    ----------
+    data: numpy.ndarray
+        Array to save.
+    save_path: str
+        Path where the file will be saved.
+    file_format: str
+        File format of the file to save.
+    log: bool
+        Whether to log the occurrence.
+
+    Returns
+    -------
+    str
+        Processed save path for upstream use.
+
+    """
+    save_path = process_file_save_path(save_path, file_format)
+
+    if not isinstance(data, np.ndarray):
+        raise ValueError("Input data is not an array.")
+
+    if log:
+        LOGGER.info("Saving array to %s", save_path)
+
+    if file_format == "npy":
+        np.save(save_path, data)
+    else:
+        raise ValueError("Invalid file format provided. Currently supporting 'npy'.")
+
+    return save_path
+
+
+def load_array(
+    load_path: str,
+    file_format: str = "npy",
+    log: bool = True,
+) -> Any:
+    """Load a file into a numpy.ndarray object.
+
+    Parameters
+    ----------
+    load_path: str
+        Path of the file to load.
+    file_format: str
+        File format of the file to load.
+    log: bool
+        Whether to log the occurrence.
+
+    Returns
+    -------
+    numpy.ndarray
+        Loaded data.
+
+    """
+    load_path = process_file_save_path(load_path, file_format)
+
+    if log:
+        LOGGER.info("Loading array from %s", load_path)
+
+    if file_format == "npy":
+        data = np.load(load_path)
+    else:
+        raise ValueError("Invalid file format provided. Currently supporting 'npy'.")
+
+    if not isinstance(data, np.ndarray):
+        raise ValueError("Loaded data is not an array.")
+
+    return data
+
+
+def save_pickle(
+    data: Any,
+    save_path: str,
+    log: bool = True,
+) -> str:
+    """Save an object to a pickle file.
+
+    Parameters
+    ----------
+    data: any
+        Data to save.
+    save_path: str
+        Path where the file will be saved.
+    log: bool
+        Whether to log the occurrence.
+
+    Returns
+    -------
+    str
+        Processed save path for upstream use.
+
+    """
+    save_path = process_file_save_path(save_path, "pkl")
+
+    if log:
+        LOGGER.info("Pickling data to %s", save_path)
+
+    with open(save_path, "wb") as handle:
+        pickle.dump(data, handle)
+
+    return save_path
+
+
+def load_pickle(
+    load_path: str,
+    log: bool = True,
+) -> Any:
+    """Load an object from a pickle file.
+
+    Parameters
+    ----------
+    load_path: str
+        Path of the file to load.
+    log: bool
+        Whether to log the occurrence.
+
+    Returns
+    -------
+    any
+        Loaded data.
+
+    """
+    load_path = process_file_save_path(load_path, "pkl")
+
+    if log:
+        LOGGER.info("Loading pickled data from %s", load_path)
+
+    with open(load_path, "rb") as handle:
+        return pickle.load(handle)
+
+
+def listdir_nonhidden(path: str) -> List[str]:
+    """List the non-hidden files of a directory.
+
+    Parameters
+    ----------
+    path: str
+        Directory path.
+
+    Returns
+    -------
+    list
+        List of non-hidden files.
+
+    """
+    return [f for f in os.listdir(path) if not f.startswith(".")]
+
+
+def yield_dataframes(
+    dir_path: str,
+    sort: bool = True,
+    skip_n: Optional[int] = None,
+    log: bool = True,
+) -> Generator[pd.DataFrame, None, None]:
+    """Yield DataFrames loaded from a directory.
+
+    Any non-hidden files in the directory must be loadable as a DataFrame.
+
+    Parameters
+    ----------
+    dir_path: str
+        Directory path of files.
+    sort: bool, default = True
+        Whether to sort the files and yield them in an ordered manner.
+    skip_n: int, optional
+        If specified, skip the first n files when yielding the files.
+        This is especially useful if a previous execution was interrupted.
+    log: bool
+        Whether to log the occurrence.
+
+    Yields
+    ------
+    pandas.DataFrame
+        A DataFrame.
+
+    """
+    files = list(listdir_nonhidden(dir_path))
+
+    if sort:
+        files.sort()
+
+    if skip_n:
+        files = files[skip_n:]
+
+    for file in files:
+        yield load_dataframe(join(dir_path, file), log=log)
+
+
+def yield_pickled_files(
+    dir_path: str,
+    sort: bool = True,
+    skip_n: Optional[int] = None,
+    log: bool = True,
+) -> Generator[Any, None, None]:
+    """Yield pickled files loaded from a directory.
+
+    Any non-hidden files in the directory must be loadable with pickle.
+
+    Parameters
+    ----------
+    dir_path: str
+        Directory path of files.
+    sort: bool, default = True
+        Whether to sort the files and yield them in an ordered manner.
+    skip_n: int, optional
+        If specified, skip the first n files when yielding the files.
+        This is especially useful if a previous execution was interrupted.
+    log: bool
+        Whether to log the occurrence.
+
+    Yields
+    ------
+    any
+        Previously pickled data.
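+
+    Examples
+    --------
+    A minimal sketch; ``./batches`` is an assumed directory of pickled objects
+    and ``process`` a hypothetical consumer::
+
+        for data in yield_pickled_files("./batches", skip_n=2):
+            process(data)  # hypothetical downstream processing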
+ + """ + files = list(listdir_nonhidden(dir_path)) + + if sort: + files.sort() + + if skip_n: + files = files[skip_n:] + + for file in files: + yield load_pickle(join(dir_path, file), log=log) diff --git a/cycquery/utils/log.py b/cycquery/utils/log.py new file mode 100644 index 0000000..c83e2e8 --- /dev/null +++ b/cycquery/utils/log.py @@ -0,0 +1,138 @@ +"""Python logging function.""" + +import logging +from typing import Optional, Union + + +LOG_FORMAT = "%(asctime)-15s %(levelname)-5s %(name)-15s - %(message)s" + +# https://stackoverflow.com/questions/384076/how-can-i-color-python-logging-output +BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = range(8) +COLORS = { + "WARNING": YELLOW, + "INFO": WHITE, + "DEBUG": BLUE, + "CRITICAL": YELLOW, + "ERROR": RED, +} + +# The background is set with 40 plus the number of the color, and the foreground 30. +# These are the sequences need to get colored ouput. +RESET_SEQ = "\033[0m" +COLOR_SEQ = "\033[1;%dm" +BOLD_SEQ = "\033[1m" + + +def formatter_message(message: str, use_color: Optional[bool] = True) -> str: + """Format message. + + Parameters + ---------- + message: str + Message format. + use_color: bool, optional + Use colored logging. + + Returns + ------- + str + Formatted message. + + """ + if use_color: + message = message.replace("$RESET", RESET_SEQ).replace("$BOLD", BOLD_SEQ) + else: + message = message.replace("$RESET", "").replace("$BOLD", "") + return message + + +class Formatter(logging.Formatter): + """Formatter. + + Attributes + ---------- + use_color: bool + Flag to toggle use of color. + + """ + + def __init__( + self, + msg: Optional[str] = None, + use_color: bool = True, + ) -> None: + """Instantiate. + + Parameters + ---------- + msg: str + Message format. + use_color: bool + Flag to set using colored formatting. + + """ + if msg is None: + msg = formatter_message(LOG_FORMAT, True) + logging.Formatter.__init__(self, msg) + self.use_color = use_color + + def format(self, record: logging.LogRecord) -> str: # noqa: A003 + """Apply formatting. + + Parameters + ---------- + record: logging.LogRecord + Record object for logging. + + Returns + ------- + str + Formatted string for log. + + """ + levelname = record.levelname + if self.use_color and levelname in COLORS: + levelname_color = ( + COLOR_SEQ % (30 + COLORS[levelname]) + levelname + RESET_SEQ + ) + record.levelname = levelname_color + return logging.Formatter.format(self, record) + + +def setup_logging( + log_path: Optional[str] = None, + log_level: Union[int, str] = "DEBUG", + print_level: Union[int, str] = "INFO", + logger: Optional[logging.Logger] = None, + use_color: bool = True, +) -> None: + """Create logger, and set it up. + + Parameters + ---------- + log_path : str, optional + Path to output log file. + log_level : str, optional + Log level for logging, defaults to DEBUG. + print_level : str, optional + Print level for logging, defaults to INFO. + logger : logging.Logger, optional + Pass logger if already exists, else a new logger object is created. + use_color: bool, optional + Use colored logging. 
+ + """ + fmt = formatter_message(LOG_FORMAT, use_color) + logger = logger if logger else logging.getLogger() + logger.setLevel(log_level) + logger.handlers = [] + + stream_handler = logging.StreamHandler() + stream_handler.setFormatter(Formatter(fmt, use_color=use_color)) + stream_handler.setLevel(print_level) + logger.addHandler(stream_handler) + + if log_path: + file_handler = logging.FileHandler(log_path) + file_handler.setFormatter(Formatter(fmt, use_color=use_color)) + logger.addHandler(file_handler) diff --git a/cycquery/utils/profile.py b/cycquery/utils/profile.py new file mode 100644 index 0000000..089d2a3 --- /dev/null +++ b/cycquery/utils/profile.py @@ -0,0 +1,37 @@ +"""Useful functions for timing, profiling.""" + +import logging +import time +from typing import Any, Callable, Dict, List + +from cycquery.utils.log import setup_logging + + +# Logging. +LOGGER = logging.getLogger(__name__) +setup_logging(print_level="INFO", logger=LOGGER) + + +def time_function(func: Callable[..., Any]) -> Callable[..., Any]: + """Time decorator function. + + Parameters + ---------- + func: function + Function to apply decorator. + + Returns + ------- + Callable + Wrapper function to apply as decorator. + + """ + + def wrapper_func(*args: List[Any], **kwargs: Dict[str, Any]) -> Any: + start_time = time.time() + result = func(*args, **kwargs) + time_taken = time.time() - start_time + LOGGER.info("Finished executing function %s in %f s", func.__name__, time_taken) + return result + + return wrapper_func diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 0000000..d0c3cbf --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,20 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line, and also +# from the environment for the first two. +SPHINXOPTS ?= +SPHINXBUILD ?= sphinx-build +SOURCEDIR = source +BUILDDIR = build + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/docs/make.bat b/docs/make.bat new file mode 100644 index 0000000..061f32f --- /dev/null +++ b/docs/make.bat @@ -0,0 +1,35 @@ +@ECHO OFF + +pushd %~dp0 + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set SOURCEDIR=source +set BUILDDIR=build + +if "%1" == "" goto help + +%SPHINXBUILD% >NUL 2>NUL +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo. 
+ echo.If you don't have Sphinx installed, grab it from + echo.https://www.sphinx-doc.org/ + exit /b 1 +) + +%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% +goto end + +:help +%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% + +:end +popd diff --git a/docs/source/_static/cyclical_diagram.jpg b/docs/source/_static/cyclical_diagram.jpg new file mode 100644 index 0000000..8aa0d5f Binary files /dev/null and b/docs/source/_static/cyclical_diagram.jpg differ diff --git a/docs/source/_static/cyclops_arch.png b/docs/source/_static/cyclops_arch.png new file mode 100644 index 0000000..2e5bd1a Binary files /dev/null and b/docs/source/_static/cyclops_arch.png differ diff --git a/docs/source/_templates/custom-class-template.rst b/docs/source/_templates/custom-class-template.rst new file mode 100644 index 0000000..f73eda5 --- /dev/null +++ b/docs/source/_templates/custom-class-template.rst @@ -0,0 +1,34 @@ +{{ fullname | escape | underline}} + +.. currentmodule:: {{ module }} + +.. autoclass:: {{ objname }} + :members: + :show-inheritance: + :inherited-members: + :special-members: __call__, __add__, __mul__ + + {% block methods %} + {% if methods %} + .. rubric:: {{ _('Methods') }} + + .. autosummary:: + :nosignatures: + {% for item in methods %} + {%- if not item.startswith('_') %} + ~{{ name }}.{{ item }} + {%- endif -%} + {%- endfor %} + {% endif %} + {% endblock %} + + {% block attributes %} + {% if attributes %} + .. rubric:: {{ _('Attributes') }} + + .. autosummary:: + {% for item in attributes %} + ~{{ name }}.{{ item }} + {%- endfor %} + {% endif %} + {% endblock %} diff --git a/docs/source/_templates/custom-module-template.rst b/docs/source/_templates/custom-module-template.rst new file mode 100644 index 0000000..d066d0e --- /dev/null +++ b/docs/source/_templates/custom-module-template.rst @@ -0,0 +1,66 @@ +{{ fullname | escape | underline}} + +.. automodule:: {{ fullname }} + + {% block attributes %} + {% if attributes %} + .. rubric:: Module attributes + + .. autosummary:: + :toctree: + {% for item in attributes %} + {{ item }} + {%- endfor %} + {% endif %} + {% endblock %} + + {% block functions %} + {% if functions %} + .. rubric:: {{ _('Functions') }} + + .. autosummary:: + :toctree: + :nosignatures: + {% for item in functions %} + {{ item }} + {%- endfor %} + {% endif %} + {% endblock %} + + {% block classes %} + {% if classes %} + .. rubric:: {{ _('Classes') }} + + .. autosummary:: + :toctree: + :template: custom-class-template.rst + :nosignatures: + {% for item in classes %} + {{ item }} + {%- endfor %} + {% endif %} + {% endblock %} + + {% block exceptions %} + {% if exceptions %} + .. rubric:: {{ _('Exceptions') }} + + .. autosummary:: + :toctree: + {% for item in exceptions %} + {{ item }} + {%- endfor %} + {% endif %} + {% endblock %} + +{% block modules %} +{% if modules %} +.. autosummary:: + :toctree: + :template: custom-module-template.rst + :recursive: +{% for item in modules %} + {{ item }} +{%- endfor %} +{% endif %} +{% endblock %} diff --git a/docs/source/api.rst b/docs/source/api.rst new file mode 100644 index 0000000..a476b07 --- /dev/null +++ b/docs/source/api.rst @@ -0,0 +1,6 @@ +API Reference +============= + +.. 
toctree:: + + reference/api/cycquery.rst diff --git a/docs/source/conf.py b/docs/source/conf.py new file mode 100644 index 0000000..9b77080 --- /dev/null +++ b/docs/source/conf.py @@ -0,0 +1,93 @@ +"""Configuration file for the Sphinx documentation builder.""" + +# pylint: disable-all + +# This file only contains a selection of the most common options. For a full +# list see the documentation: +# https://www.sphinx-doc.org/en/master/usage/configuration.html + +# -- Path setup -------------------------------------------------------------- + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. + +import os +import sys + + +sys.path.insert(0, os.path.abspath("../../cycquery")) + + +# -- Project information ----------------------------------------------------- + +project = "cyclops-query" +copyright = "2022, Vector AI Engineering" # noqa: A001 +author = "Vector AI Engineering" + + +# -- General configuration --------------------------------------------------- + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.autosummary", + "sphinx.ext.viewcode", + "sphinx.ext.intersphinx", + "sphinx_autodoc_typehints", + "myst_parser", + "sphinx_copybutton", + "nbsphinx", + "IPython.sphinxext.ipython_console_highlighting", +] +autosummary_generate = True +napoleon_google_docstring = False +napoleon_numpy_docstring = True +napoleon_include_init_with_doc = True +napoleon_attr_annotations = True +add_module_names = False +autoclass_content = "both" +autodoc_inherit_docstrings = True +set_type_checking_flag = True +autosectionlabel_prefix_document = True +copybutton_prompt_text = r">>> |\.\.\. " +copybutton_prompt_is_regexp = True + +intersphinx_mapping = { + "python": ("https://docs.python.org/3.9/", None), + "numpy": ("http://docs.scipy.org/doc/numpy/", None), + "pandas": ("https://pandas.pydata.org/docs/", None), + "sqlalchemy": ("https://docs.sqlalchemy.org", None), + "sklearn": ("https://scikit-learn.org/stable/", None), + "dask": ("https://docs.dask.org/en/stable/", None), + "torch": ("https://pytorch.org/docs/stable/", None), +} + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This pattern also affects html_static_path and html_extra_path. +exclude_patterns = ["**.ipynb_checkpoints"] +source_suffix = [".rst", ".md"] + + +# -- Options for HTML output ------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# +html_theme = "theme" +html_theme_path = ["."] + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". 
+html_static_path = ["_static"] diff --git a/docs/source/contributing.rst b/docs/source/contributing.rst new file mode 100644 index 0000000..07f23a2 --- /dev/null +++ b/docs/source/contributing.rst @@ -0,0 +1,37 @@
+Contributing to cyclops-query
+=============================
+
+Thanks for your interest in contributing to the cyclops-query tool!
+
+To submit PRs, please fill out the PR template along with the PR. If the
+PR fixes an issue, don’t forget to link the PR to the issue!
+
+Pre-commit hooks
+----------------
+
+Once the Python virtual environment is set up, you can run pre-commit
+hooks using:
+
+.. code:: bash
+
+   pre-commit run --all-files
+
+Coding guidelines
+-----------------
+
+For code style, we recommend the `google style
+guide `__.
+
+Pre-commit hooks apply the
+`black `__
+code formatting.
+
+For docstrings, we use the `numpy
+format `__.
+
+We use `ruff `__ for further static
+code analysis. The pre-commit hooks show errors, which you need to fix
+before submitting a PR.
+
+Last but not least, we use type hints in our code, which are then
+checked using `mypy `__. diff --git a/docs/source/index.rst b/docs/source/index.rst new file mode 100644 index 0000000..adb9b40 --- /dev/null +++ b/docs/source/index.rst @@ -0,0 +1,12 @@
+Welcome to cyclops-query's documentation!
+=========================================
+
+
+.. toctree::
+   :maxdepth: 2
+   :caption: Contents:
+
+   intro
+   contributing
+   tutorials
+   api diff --git a/docs/source/intro.rst b/docs/source/intro.rst new file mode 100644 index 0000000..a71062b --- /dev/null +++ b/docs/source/intro.rst @@ -0,0 +1,82 @@
+cyclops-query
+=============
+
+|PyPI| |code checks| |integration tests| |docs| |codecov| |license|
+
+``cyclops-query`` is a tool for querying EHR databases.
+
+🐣 Getting Started
+==================
+
+Installing cyclops-query using pip
+----------------------------------
+
+.. code:: bash
+
+   python3 -m pip install cycquery
+
+🧑🏿‍💻 Developing
+=======================
+
+Using poetry
+------------
+
+The development environment can be set up using
+`poetry `__. Make
+sure it is installed, and then run:
+
+.. code:: bash
+
+   python3 -m poetry install
+   source $(poetry env info --path)/bin/activate
+
+API documentation is built using
+`Sphinx `__ and can be built
+locally with:
+
+.. code:: bash
+
+   cd docs
+   make html SPHINXOPTS="-D nbsphinx_allow_errors=True"
+
+Contributing
+------------
+
+Contributions to ``cyclops-query`` are welcome. See
+`Contributing `__
+for guidelines.
+
+📚 `Documentation `__
+=======================================================================
+
+🎓 Citation
+===========
+
+Reference to cite when you use ``cyclops-query`` in a project or a
+research paper:
+
+::
+
+   @article {Krishnan2022.12.02.22283021,
+       author = {Krishnan, Amrit and Subasri, Vallijah and McKeen, Kaden and Kore, Ali and Ogidi, Franklin and Alinoori, Mahshid and Lalani, Nadim and Dhalla, Azra and Verma, Amol and Razak, Fahad and Pandya, Deval and Dolatabadi, Elham},
+       title = {CyclOps: Cyclical development towards operationalizing ML models for health},
+       elocation-id = {2022.12.02.22283021},
+       year = {2022},
+       doi = {10.1101/2022.12.02.22283021},
+       publisher = {Cold Spring Harbor Laboratory Press},
+       URL = {https://www.medrxiv.org/content/early/2022/12/08/2022.12.02.22283021},
+       journal = {medRxiv}
+   }
+
+.. |PyPI| image:: https://img.shields.io/pypi/v/cycquery
+   :target: https://pypi.org/project/cycquery
+..
|code checks| image:: https://github.com/VectorInstitute/cyclops-query/actions/workflows/code_checks.yml/badge.svg + :target: https://github.com/VectorInstitute/cyclops-query/actions/workflows/code_checks.yml +.. |integration tests| image:: https://github.com/VectorInstitute/cyclops-query/actions/workflows/integration_tests.yml/badge.svg + :target: https://github.com/VectorInstitute/cyclops-query/actions/workflows/integration_tests.yml +.. |docs| image:: https://github.com/VectorInstitute/cyclops-query/actions/workflows/docs_deploy.yml/badge.svg + :target: https://github.com/VectorInstitute/cyclops-query/actions/workflows/docs_deploy.yml +.. |codecov| image:: https://codecov.io/gh/VectorInstitute/cyclops-query/branch/main/graph/badge.svg + :target: https://codecov.io/gh/VectorInstitute/cyclops-query +.. |license| image:: https://img.shields.io/github/license/VectorInstitute/cyclops-query.svg + :target: https://github.com/VectorInstitute/cyclops-query/blob/main/LICENSE diff --git a/docs/source/reference/api/cycquery.rst b/docs/source/reference/api/cycquery.rst new file mode 100644 index 0000000..187de7b --- /dev/null +++ b/docs/source/reference/api/cycquery.rst @@ -0,0 +1,30 @@ +.. role:: hidden + :class: hidden-section + +cycquery +======== + +.. automodule:: cycquery + +.. autosummary:: + :toctree: _autosummary + :nosignatures: + :template: custom-module-template.rst + + interface + ops + base + +dataset APIs +------------ + +.. autosummary:: + :toctree: _autosummary + :nosignatures: + :template: custom-module-template.rst + + mimiciii + mimiciv + eicu + omop + gemini diff --git a/docs/source/theme/static/css/cyclops.css b/docs/source/theme/static/css/cyclops.css new file mode 100644 index 0000000..45fbc6f --- /dev/null +++ b/docs/source/theme/static/css/cyclops.css @@ -0,0 +1,326 @@ +@import url('https://fonts.googleapis.com/css2?family=Lato:ital,wght@0,100;0,300;0,400;0,700;0,900;1,100;1,300;1,400;1,700;1,900&display=swap'); +@import url('https://fonts.googleapis.com/css2?family=IBM+Plex+Mono:ital,wght@0,400;0,700;1,400;1,700&display=swap'); +@import "theme.css"; /* From sphinx_rtd_theme */ + +html { + --text-color: #24292e; + --heading-color: #404040; + --link-color: #5097ba; + --sidebar-background-color: #2e76d49d; + --content-background-color: #ffffff; +} + +body { + font-family: 'Roboto', sans-serif; + font-weight: 400; + color: var(--text-color); + line-height: 1.5; +} + +.wy-menu-vertical a { color: #98a8f5;; } + +h1, h2, h3, h4, h5, h6, legend, .rst-content .toctree-wrapper p.caption, .rst-content .sidebar .sidebar-title { + font-family: Lato, 'Helvetica Neue', sans-serif; + font-weight: 600; + color: var(--heading-color); +} + +p { + line-height: inherit; +} + +a { + color: var(--link-color); +} +/* underlign lins on hover */ +a:hover { + text-decoration: underline; +} + +/* Monospace typography */ +footer span.commit code, +.rst-content pre.literal-block, +.rst-content div[class^='highlight'] pre, +.rst-content .linenodiv pre, +.rst-content tt, +.rst-content code, +.rst-content pre, +.rst-content kbd, +.rst-content samp { + font-family: 'IBM Plex Mono', monospace; + font-size: 0.8rem; +} + +/* Inline (backticks) code inspired by docusaurus (which Auspice used previously) */ +.rst-content code.literal, +.rst-content tt.literal { + background-color: rgba(27,31,35,.05); + border-radius: 3px; + border: 0; + color: inherit; + margin: 0; + padding: 3.2px 6.4px; +} + +/* Sidebar */ +.wy-nav-side { + background: var(--sidebar-background-color); +} + +/* main content section */ 
+.wy-nav-content { + background: var(--content-background-color); +} + +/* don't change the background for the area on the RHS of the main content */ +.wy-nav-content-wrap { + background: inherit; +} + +/* Pin the Nextstrain logo, project name, version, and search box to the top of + * the sidebar when the sidebar scrolls. + */ +.wy-side-nav-search { + position: sticky; + top: 0; +} + +/* Sub-project name, version (optional) and link back to the main docs */ +.wy-side-nav-search > div.subproject { + margin-top: -1rem; + margin-bottom: 1.2rem; +} +.wy-side-nav-search > div.subproject > a { /* subproject name */ + font-size: 1.5rem; + font-weight: 500; + color: var(--heading-color); +} +.wy-side-nav-search > div.subproject > .version { /* version name */ + display: block; + font-size: 1.1rem; + font-weight: 300; + color: var(--heading-color); +} + +/* Remove blue accent border */ +.wy-side-nav-search input[type="text"] { + border-color: #ccc; +} + +.wy-menu-vertical { + overflow-y: scroll; +} +.wy-menu-vertical header, +.wy-menu-vertical p.caption, +.wy-menu-vertical a { + font-size: 0.8rem; +} + +/* Remove sidebar TOC link colors, hover states, and borders */ +.wy-menu-vertical a { + color: var(--text-color); + background: none !important; +} + +.wy-menu-vertical a:hover { + color: var(--link-color) !important; + background: none !important; +} + +.wy-menu-vertical li.current { + background: none !important; +} + +.wy-menu-vertical li.current a { + border-right: none; +} + +.wy-menu-vertical li.current > a { + border-right: 2px solid var(--link-color); +} + +.wy-menu-vertical li.toctree-l1.current > a { + border-top: none; + border-bottom: none; +} + +/* Remove sidebar TOC heading/caption color */ +.wy-menu-vertical p.caption { + color: var(--heading-color); +} + +/* the buttons (previous / next) at the bottom of each doc page */ +.wy-nav-content a.btn { + border: 1px solid #24292e; + border-radius: 3px; + color: inherit; + display: inline-block; + font-size: 14px; + font-weight: 400; + line-height: 1.2em; + padding: 10px; + text-decoration: none !important; + text-transform: uppercase; + transition: background .3s,color .3s; + box-shadow: none; + font-family: inherit; + background-color: inherit; +} +/* following needs !important to override sphynx CSS which itself uses !important */ +.wy-nav-content a.btn-neutral { + background-color: var(--content-background-color) !important; + color: var(--text-color) !important; +} +.wy-nav-content a.btn-neutral:hover { + background-color: var(--text-color) !important; + color: var(--content-background-color) !important; +} + +.wy-nav-content { + max-width: 900px !important; +} + +/* Mark external links in the sidebar */ +.wy-menu-vertical a.external::after { + display: inline-block; + font-family: FontAwesome; + font-size: 0.6rem; + font-style: normal; + font-variant: normal; + text-rendering: auto; + margin-left: 0.2rem; + content: ""; +} + + +/* Breadcrumb separators (at top of the page) */ +.wy-breadcrumbs > li:not(:first-child):not(.wy-breadcrumbs-aside)::before { + display: inline-block; + content: "/\A0"; /* \A0 = no-break space (nbsp) */ + padding-right: 5px; + + /* The trailing space + 5px _right_ padding matches the amount of whitespace + * on the other side of the slash (/) the comes from the spaces between
<li>s
+ * in the template and the 5px _left_ padding on <li>s. The result is that
+ * the slash (/) is centered between the end of the previous <li>'s text and
+ * start of this <li>'s text.
+ */
+}
+
+
+/* Tables
+ */
+/* .wy-nav-content-wrap .wy-nav-content .wy-table-responsive {
+   /* Tables are wrapped in a container <div> (.wy-table-responsive) to handle
+    * overflow. Set this <div>
    's width to the larger of (a) 100% of its + * (several levels removed) container (.wy-nav-content) or (b) the width of + * the viewport minus the width of the sidebar and some left-side padding. + * Most of the time the latter (b) will be larger, but the former (a) will be + * larger when the viewport is narrow and the sidebar is hidden (e.g. on a + * mobile device). + * + * This has the effect of allowing tables to expand rightwards out of the + * main content container (.wy-nav-content), which is limited to max-width: + * 800px for text readability. Tables are at first allowed to overrun just + * the right-side content padding, but as the viewport expands wider, tables + * will spill out into the new blank space beyond the main content padding. + * Allowing tables to extend right up against the viewport edge makes a table + * appear cut off, which makes it clearer to the reader that expanding the + * viewport will reveal more of the table. However, tables also remain + * horizontally scrollable as necessary to accommodate overflow. This + * provides two means for seeing overflowing table content (scrolling or + * expanding the viewport). + * + * For reference in the calculation below: + * + * 100vw = width of the viewport + * 300px = width of .wy-nav-side and corresponding margin-left of .wy-nav-content-wrap + * 3.236em = padding-left of .wy-nav-content + * + * -trs, 16 March 2022 + */ +/* width: max(100%, calc(100vw - 300px - 3.236em)); + + /* override earlier max-width: 100% */ +/* max-width: none; +}*/ + +/* Mobile nav (top bar heading + flyout menu icon) + */ + +.wy-nav-top a, .wy-nav-top i { + color: var(--heading-color); +} + + +/* Footer styles. Largely chosen to mimic the previous rendering of the docs. See +https://github.com/nextstrain/nextstrain.org/blob/b1e09e57e91ed0c9343e1cd3104877ec3c5344a4/static-site/src/components/Footer/index.jsx +*/ +footer { + color: var(--text-color); +} +footer div { + margin: 20px 0px 0px 0px; +} +footer .footer-small-text { + font-weight: 300; + font-size: 0.9rem; +} +footer .copyright { + font-weight: 300; + font-size: 0.8rem; + text-align: center; +} +footer div.logo { + display: flex; + flex-wrap: wrap; + justify-content: space-around; + margin: 0px; + padding: 10px 0px 0px 0px; +} +footer a.logo { + /* Using flex here (to vertically align the child img) causes aspect-ratio issues */ + flex-basis: 120px; + margin: 10px auto 10px auto; + text-align: center; +} +footer a.logo:hover { + text-decoration: none; +} +footer span.logo { + display: inline-block; + height: 100%; + vertical-align: middle; +} +footer img.logo { + display: inline-block; + vertical-align: middle; + height: auto; +} +footer p.avatar { + font-weight: 300; + font-size: 1.1rem; + text-align: center; + margin: 16px 0px -10px 0px; +} +footer div.avatar { + display: flex; + flex-wrap: wrap; + justify-content: center; + line-height: 2.5 +} +footer div.avatar a { + color: var(--text-color); +} +footer div.avatar span { + white-space: nowrap; + font-weight: 300; + margin-left: 2px; + margin-right: 2px; +} +footer div.avatar img { + margin-left: 5px; + margin-right: 4px; + border-radius: 50%; + vertical-align: middle; +} diff --git a/docs/source/theme/static/cyclops_logo-dark.png b/docs/source/theme/static/cyclops_logo-dark.png new file mode 100644 index 0000000..6f7f3c1 Binary files /dev/null and b/docs/source/theme/static/cyclops_logo-dark.png differ diff --git a/docs/source/theme/static/cyclops_logo.png b/docs/source/theme/static/cyclops_logo.png new file mode 100644 index 
0000000..0b98f85 Binary files /dev/null and b/docs/source/theme/static/cyclops_logo.png differ diff --git a/docs/source/theme/static/favicon.ico b/docs/source/theme/static/favicon.ico new file mode 100644 index 0000000..8e0b9da Binary files /dev/null and b/docs/source/theme/static/favicon.ico differ diff --git a/docs/source/theme/static/js/theme.js b/docs/source/theme/static/js/theme.js new file mode 100644 index 0000000..f9f63bc --- /dev/null +++ b/docs/source/theme/static/js/theme.js @@ -0,0 +1,31 @@ +/* XXX TODO: Delete this file if the bug fix PR below is accepted and released + * by upstream. + * + * Overrides the default static/js/theme.js from sphinx-rtd-theme with our + * customized copy that fixes the initial scroll-into-view behaviour for the + * nav sidebar, c.f. . + * -trs, 8 July 2022 + */ +/* + * The MIT License (MIT) + * + * Copyright (c) 2013-2018 Dave Snider, Read the Docs, Inc. & contributors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy of + * this software and associated documentation files (the "Software"), to deal in + * the Software without restriction, including without limitation the rights to + * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of + * the Software, and to permit persons to whom the Software is furnished to do so, + * subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS + * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR + * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER + * IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN + * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ */ +!function(n){var e={};function t(i){if(e[i])return e[i].exports;var o=e[i]={i:i,l:!1,exports:{}};return n[i].call(o.exports,o,o.exports,t),o.l=!0,o.exports}t.m=n,t.c=e,t.d=function(n,e,i){t.o(n,e)||Object.defineProperty(n,e,{enumerable:!0,get:i})},t.r=function(n){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(n,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(n,"__esModule",{value:!0})},t.t=function(n,e){if(1&e&&(n=t(n)),8&e)return n;if(4&e&&"object"==typeof n&&n&&n.__esModule)return n;var i=Object.create(null);if(t.r(i),Object.defineProperty(i,"default",{enumerable:!0,value:n}),2&e&&"string"!=typeof n)for(var o in n)t.d(i,o,function(e){return n[e]}.bind(null,o));return i},t.n=function(n){var e=n&&n.__esModule?function(){return n.default}:function(){return n};return t.d(e,"a",e),e},t.o=function(n,e){return Object.prototype.hasOwnProperty.call(n,e)},t.p="",t(t.s=0)}([function(n,e,t){t(1),n.exports=t(3)},function(n,e,t){(function(){var e="undefined"!=typeof window?window.jQuery:t(2);n.exports.ThemeNav={navBar:null,win:null,winScroll:!1,winResize:!1,linkScroll:!1,winPosition:0,winHeight:null,docHeight:null,isRunning:!1,enable:function(n){var t=this;void 0===n&&(n=!0),t.isRunning||(t.isRunning=!0,e((function(e){t.init(e),t.reset(),t.win.on("hashchange",t.reset),n&&t.win.on("scroll",(function(){t.linkScroll||t.winScroll||(t.winScroll=!0,requestAnimationFrame((function(){t.onScroll()})))})),t.win.on("resize",(function(){t.winResize||(t.winResize=!0,requestAnimationFrame((function(){t.onResize()})))})),t.onResize()})))},enableSticky:function(){this.enable(!0)},init:function(n){n(document);var e=this;this.navBar=n("div.wy-side-scroll:first"),this.win=n(window),n(document).on("click","[data-toggle='wy-nav-top']",(function(){n("[data-toggle='wy-nav-shift']").toggleClass("shift"),n("[data-toggle='rst-versions']").toggleClass("shift")})).on("click",".wy-menu-vertical .current ul li a",(function(){var t=n(this);n("[data-toggle='wy-nav-shift']").removeClass("shift"),n("[data-toggle='rst-versions']").toggleClass("shift"),e.toggleCurrent(t),e.hashChange()})).on("click","[data-toggle='rst-current-version']",(function(){n("[data-toggle='rst-versions']").toggleClass("shift-up")})),n("table.docutils:not(.field-list,.footnote,.citation)").wrap("
    "),n("table.docutils.footnote").wrap("
    "),n("table.docutils.citation").wrap("
    "),n(".wy-menu-vertical ul").not(".simple").siblings("a").each((function(){var t=n(this);expand=n(''),expand.on("click",(function(n){return e.toggleCurrent(t),n.stopPropagation(),!1})),t.prepend(expand)}))},reset:function(){var n=encodeURI(window.location.hash)||"#";try{var e=$(".wy-menu-vertical"),t=e.find('[href="'+n+'"]');if(0===t.length){var i=$('.document [id="'+n.substring(1)+'"]').closest("div.section");0===(t=e.find('[href="#'+i.attr("id")+'"]')).length&&(t=e.find('[href="#"]'))}if(t.length>0){$(".wy-menu-vertical .current").removeClass("current").attr("aria-expanded","false"),t.addClass("current").attr("aria-expanded","true"),t.closest("li.toctree-l1").parent().addClass("current").attr("aria-expanded","true");for(let n=1;n<=10;n++)t.closest("li.toctree-l"+n).addClass("current").attr("aria-expanded","true");t.closest("li.toctree-l1")[0].scrollIntoView({block:"nearest"}),t[0].scrollIntoView({block:"nearest"})}}catch(n){console.log("Error expanding nav for anchor",n)}},onScroll:function(){this.winScroll=!1;var n=this.win.scrollTop(),e=n+this.winHeight,t=this.navBar.scrollTop()+(n-this.winPosition);n<0||e>this.docHeight||(this.navBar.scrollTop(t),this.winPosition=n)},onResize:function(){this.winResize=!1,this.winHeight=this.win.height(),this.docHeight=$(document).height()},hashChange:function(){this.linkScroll=!0,this.win.one("hashchange",(function(){this.linkScroll=!1}))},toggleCurrent:function(n){var e=n.closest("li");e.siblings("li.current").removeClass("current").attr("aria-expanded","false"),e.siblings().find("li.current").removeClass("current").attr("aria-expanded","false");var t=e.find("> ul li");t.length&&(t.removeClass("current").attr("aria-expanded","false"),e.toggleClass("current").attr("aria-expanded",(function(n,e){return"true"==e?"false":"true"})))}},"undefined"!=typeof window&&(window.SphinxRtdTheme={Navigation:n.exports.ThemeNav,StickyNav:n.exports.ThemeNav}),function(){for(var n=0,e=["ms","moz","webkit","o"],t=0;t +[theme] +inherit = sphinx_rtd_theme +stylesheet = css/cyclops.css +pygments_style = default + +[options] +style_nav_header_background = #efeeed +logo = True +logo_link = https://vectorinstitute.github.io/cyclops/ +subproject = True diff --git a/docs/source/tutorials.rst b/docs/source/tutorials.rst new file mode 100644 index 0000000..9eb574e --- /dev/null +++ b/docs/source/tutorials.rst @@ -0,0 +1,6 @@ +Tutorials +========= + +.. toctree:: + + tutorials_query diff --git a/docs/source/tutorials/eicu.ipynb b/docs/source/tutorials/eicu.ipynb new file mode 100644 index 0000000..e0874e7 --- /dev/null +++ b/docs/source/tutorials/eicu.ipynb @@ -0,0 +1,203 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "28288104-b8ce-43fa-a22b-8f02239551a5", + "metadata": {}, + "source": [ + "# eICU-CRD tutorial" + ] + }, + { + "cell_type": "markdown", + "id": "74b155b9-9ba6-455c-b987-4c261c339364", + "metadata": {}, + "source": [ + "This notebook shows examples of how to use the cyclops-query tool on [eicu-CRD](https://eicu-crd.mit.edu/).\n", + "\n", + "Each query is limit to 100 rows (for quick results).\n", + "\n", + "* First, setup the eICU database according to the instructions in [eicu-code](https://github.com/MIT-LCP/eicu-code/tree/master/build-db/postgres).\n", + "* The database is assumed to be hosted using postgres. Update the config parameters such as username and password, passed to `EICUQuerier` accordingly." 
+ ] + }, + { + "cell_type": "markdown", + "id": "0c7c1635-3a70-42d1-99a6-c7f7a9cf9d21", + "metadata": {}, + "source": [ + "## Imports and instantiate `EICUQuerier`" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "75a140e0-fb27-4319-862f-be54397abe5c", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "\"\"\"eICU-CRD tutorial.\"\"\"\n", + "\n", + "import cycquery.ops as qo\n", + "from cycquery import EICUQuerier\n", + "\n", + "\n", + "querier = EICUQuerier(\n", + " dbms=\"postgresql\",\n", + " port=5432,\n", + " host=\"localhost\",\n", + " database=\"eicu\",\n", + " user=\"postgres\",\n", + " password=\"pwd\",\n", + ")\n", + "# List all tables.\n", + "querier.list_tables(\"eicu_crd\")" + ] + }, + { + "cell_type": "markdown", + "id": "ea04cc11-e947-4097-91df-729f28b3732c", + "metadata": {}, + "source": [ + "## Example 1. Get all female patients discharged in 2014 (limit to 100 rows)." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c1efa964-8978-4a0e-9892-5ea4ce9953a3", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "ops = qo.Sequential(\n", + " qo.ConditionEquals(\"hospitaldischargeyear\", 2014),\n", + " qo.ConditionEquals(\"gender\", \"Female\"),\n", + ")\n", + "patients = querier.eicu_crd.patient()\n", + "patients = patients.ops(ops).run(limit=100)\n", + "print(f\"{len(patients)} rows extracted!\")" + ] + }, + { + "cell_type": "markdown", + "id": "cd43679b-3d33-4135-b97c-373447289a9c", + "metadata": { + "tags": [] + }, + "source": [ + "## Example 2. Get all patient encounters with diagnoses (`schizophrenia` in `diagnosisstring`), discharged in the year 2015." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a7ab5fa3-e26b-47a7-818f-1bf367a55760", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "patients = querier.eicu_crd.patient()\n", + "diagnoses = querier.eicu_crd.diagnosis()\n", + "diagnoses = diagnoses.ops(qo.ConditionSubstring(\"diagnosisstring\", \"schizophrenia\"))\n", + "patient_diagnoses = patients.join(\n", + " join_table=diagnoses,\n", + " on=\"patientunitstayid\",\n", + ")\n", + "patient_diagnoses = patient_diagnoses.run(limit=100)\n", + "print(f\"{len(patient_diagnoses)} rows extracted!\")" + ] + }, + { + "cell_type": "markdown", + "id": "241f7d10-9e04-44ae-b325-87f5a4046df2", + "metadata": {}, + "source": [ + "## Example 3. Get potassium lab tests for patients discharged in the year 2014, for all teaching hospitals." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "24043abc-1878-4e00-8229-36d4a0368b98", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "hospitals = querier.eicu_crd.hospital()\n", + "hospitals = hospitals.ops(qo.ConditionEquals(\"teachingstatus\", True))\n", + "patients = querier.eicu_crd.patient()\n", + "patients = patients.ops(qo.ConditionEquals(\"hospitaldischargeyear\", 2015))\n", + "patients = patients.join(\n", + " join_table=hospitals,\n", + " on=\"hospitalid\",\n", + ")\n", + "labs = querier.eicu_crd.lab()\n", + "labs = labs.ops(qo.ConditionEquals(\"labname\", \"potassium\"))\n", + "patient_labs = patients.join(\n", + " join_table=labs,\n", + " on=\"patientunitstayid\",\n", + ").run(limit=100)\n", + "print(f\"{len(patient_labs)} rows extracted!\")" + ] + }, + { + "cell_type": "markdown", + "id": "2dbf0fdc-8d99-4fde-ab3e-7c8a67b72f8b", + "metadata": {}, + "source": [ + "## Example 4. Get glucose medications (substring search) for female patients discharged in 2014." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f6142f27-e8d1-453c-bfe2-2265d9ff1914", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "ops = qo.Sequential(\n", + " qo.ConditionEquals(\"hospitaldischargeyear\", 2014),\n", + " qo.ConditionEquals(\"gender\", \"Female\"),\n", + ")\n", + "patients = querier.eicu_crd.patient()\n", + "patients = patients.ops(ops)\n", + "medications = querier.eicu_crd.medication()\n", + "medications = medications.ops(qo.ConditionSubstring(\"drugname\", \"glucose\"))\n", + "patient_medications = patients.join(\n", + " join_table=medications,\n", + " on=\"patientunitstayid\",\n", + ").run(limit=100)\n", + "print(f\"{len(patient_medications)} rows extracted!\")" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.12" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/docs/source/tutorials/gemini.ipynb b/docs/source/tutorials/gemini.ipynb new file mode 100644 index 0000000..53797d6 --- /dev/null +++ b/docs/source/tutorials/gemini.ipynb @@ -0,0 +1,297 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "6b8f5515-0d77-4989-8e08-33872eafb790", + "metadata": {}, + "source": [ + "# GEMINI tutorial" + ] + }, + { + "cell_type": "markdown", + "id": "57e3214e-f1d5-46cb-bb2a-514a48e0269d", + "metadata": {}, + "source": [ + "This notebook shows examples of how to use the cyclops-query tool on [GEMINI](https://www.geminimedicine.ca/)." + ] + }, + { + "cell_type": "markdown", + "id": "633e60f1-d4cc-4d4a-87de-db14c4152ac6", + "metadata": {}, + "source": [ + "## Imports and instantiate `GEMINIQuerier`." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "00c4a25b-1f3d-4613-b20f-469e7b1043b7", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2023-09-21 09:17:45,449 \u001b[1;37mINFO\u001b[0m cyclops.query.orm - Database setup, ready to run queries!\n" + ] + }, + { + "data": { + "text/plain": [ + "['public.lookup_icd10_ca_description',\n", + " 'public.lookup_statcan',\n", + " 'public.lookup_cci',\n", + " 'public.lookup_icd10_ca_to_ccsr',\n", + " 'public.lookup_ip_administrative',\n", + " 'public.lookup_lab_concept',\n", + " 'public.lookup_vitals_concept',\n", + " 'public.lookup_pharmacy_concept',\n", + " 'public.lookup_diagnosis',\n", + " 'public.locality_variables',\n", + " 'public.admdad',\n", + " 'public.derived_variables',\n", + " 'public.ipscu',\n", + " 'public.lookup_phy_characteristics',\n", + " 'public.lab',\n", + " 'public.ipintervention',\n", + " 'public.lookup_ccsr',\n", + " 'public.lookup_pharmacy_route',\n", + " 'public.lookup_transfusion_concept',\n", + " 'public.lookup_ip_scu',\n", + " 'public.lookup_er_administrative',\n", + " 'public.lookup_imaging',\n", + " 'public.pharmacy',\n", + " 'public.radiology',\n", + " 'public.lookup_transfer',\n", + " 'public.ipdiagnosis',\n", + " 'public.lookup_room_transfer',\n", + " 'public.er',\n", + " 'public.erdiagnosis',\n", + " 'public.erintervention',\n", + " 'public.roomtransfer',\n", + " 'public.transfusion',\n", + " 'public.vitals',\n", + " 'public.lookup_hospital_num']" + ] + }, + "execution_count": 1, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "\"\"\"GEMINI tutorial.\"\"\"\n", + "\n", + "import cycquery.ops as qo\n", + "from cycquery import GEMINIQuerier\n", + "\n", + "\n", + "querier = GEMINIQuerier(\n", + "    host=\"db.gemini-hpc.ca\",\n", + "    database=\"delirium_v4_0_1\",\n", + "    user=\"username\",\n", + "    password=\"password\",\n", + ")\n", + "# List all tables.\n", + "querier.list_tables(\"public\")" + ] + }, + { + "cell_type": "markdown", + "id": "7980e403-1f99-4886-8856-d6d6b11ec15d", + "metadata": {}, + "source": [ + "## Example 1a. Create a table with only one hospitalization per patient, keeping the most recent encounter for each patient. Sort the dataset by `patient_id_hashed` and `discharge_date_time`, and then keep the most recent record." + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "2f3e9b5f-3156-4414-a296-1e45bde9d147", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2023-09-21 09:17:51,902 \u001b[1;37mINFO\u001b[0m cyclops.query.orm - Query returned successfully!\n", + "2023-09-21 09:17:51,903 \u001b[1;37mINFO\u001b[0m cyclops.utils.profile - Finished executing function run_query in 6.093352 s\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "189734 rows extracted!\n" + ] + } + ], + "source": [ + "ops = qo.Sequential(\n", + "    qo.OrderBy(\n", + "        [\"patient_id_hashed\", \"discharge_date_time\"],\n", + "        ascending=[True, False],\n", + "    ),\n", + "    qo.Distinct(\"patient_id_hashed\"),\n", + ")\n", + "encounters = querier.public.admdad()\n", + "encounters = encounters.ops(ops).run()\n", + "print(f\"{len(encounters)} rows extracted!\")" + ] + }, + { + "cell_type": "markdown", + "id": "9040fd47-79b7-4d1d-9ca4-2ea09efea267", + "metadata": {}, + "source": [ + "## Example 1b. From the above set of encounters, take a subset of patients who were discharged between April 1, 2015 and March 31, 2016."
+ ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "105c0824-b735-4f6b-a008-7ddf8279eb20", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2023-09-21 09:17:52,591 \u001b[1;37mINFO\u001b[0m cyclops.query.orm - Query returned successfully!\n", + "2023-09-21 09:17:52,592 \u001b[1;37mINFO\u001b[0m cyclops.utils.profile - Finished executing function run_query in 0.675141 s\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "32567 rows extracted!\n" + ] + } + ], + "source": [ + "ops = qo.Sequential(\n", + " qo.Cast(\"discharge_date_time\", \"timestamp\"),\n", + " qo.ConditionAfterDate(\"discharge_date_time\", \"2015-04-01\"),\n", + " qo.ConditionBeforeDate(\"discharge_date_time\", \"2016-03-31\"),\n", + ")\n", + "encounters_query = querier.public.admdad()\n", + "encounters_query = encounters_query.ops(ops)\n", + "encounters = encounters_query.run()\n", + "print(f\"{len(encounters)} rows extracted!\")" + ] + }, + { + "cell_type": "markdown", + "id": "f4d8b0bc-4fe1-4b07-86ad-ce1032708d78", + "metadata": {}, + "source": [ + "## Example 1c. From the above set of encounters, get the total number of admissions for each hospital." + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "65d92139-c1d5-45ce-908f-9052c9131ed3", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2023-09-21 09:17:52,856 \u001b[1;37mINFO\u001b[0m cyclops.query.orm - Query returned successfully!\n", + "2023-09-21 09:17:52,857 \u001b[1;37mINFO\u001b[0m cyclops.utils.profile - Finished executing function run_query in 0.145693 s\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "7 rows extracted!\n" + ] + } + ], + "source": [ + "ops = qo.GroupByAggregate(\"hospital_num\", {\"hospital_num\": (\"count\", \"count\")})\n", + "encounters_per_site = encounters_query.ops(ops).run()\n", + "print(f\"{len(encounters_per_site)} rows extracted!\")" + ] + }, + { + "cell_type": "markdown", + "id": "848c5b90-1e66-4a5a-9ebb-9c0caa105c3f", + "metadata": {}, + "source": [ + "## Example 2a. How many sodium tests were placed between Apr 1, 2015 and May 31, 2015 at hospital 101?" 
+ ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "33bd3450-985f-4cc5-9da9-09846ad297e7", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2023-09-21 09:26:19,814 \u001b[1;37mINFO\u001b[0m cyclops.query.orm - Query returned successfully!\n", + "2023-09-21 09:26:19,815 \u001b[1;37mINFO\u001b[0m cyclops.utils.profile - Finished executing function run_query in 506.939296 s\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "1 rows extracted!\n", + "   hospital_num  count\n", + "0           101   9305\n" + ] + } + ], + "source": [ + "encounters = querier.public.admdad()\n", + "encounters = encounters.ops(qo.ConditionEquals(\"hospital_num\", 101))\n", + "lab_ops = qo.Sequential(\n", + "    qo.DropEmpty(\"collection_date_time\"),\n", + "    qo.Cast(\"collection_date_time\", \"timestamp\"),\n", + "    qo.ConditionAfterDate(\"collection_date_time\", \"2015-04-01\"),\n", + "    qo.ConditionBeforeDate(\"collection_date_time\", \"2015-05-31\"),\n", + "    qo.ConditionSubstring(\"test_type_mapped\", \"sodium\"),\n", + ")\n", + "labs = querier.public.lab()\n", + "labs = labs.ops(lab_ops)\n", + "encounters_labs = encounters.join(labs, on=\"genc_id\")\n", + "encounters_labs = encounters_labs.ops(\n", + "    qo.GroupByAggregate(\"hospital_num\", {\"hospital_num\": (\"count\", \"count\")}),\n", + ")\n", + "sodium_tests = encounters_labs.run()\n", + "print(f\"{len(sodium_tests)} rows extracted!\")\n", + "print(sodium_tests)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.12" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/docs/source/tutorials/mimiciii.ipynb b/docs/source/tutorials/mimiciii.ipynb new file mode 100644 index 0000000..b935b26 --- /dev/null +++ b/docs/source/tutorials/mimiciii.ipynb @@ -0,0 +1,197 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "28288104-b8ce-43fa-a22b-8f02239551a5", + "metadata": {}, + "source": [ + "# MIMIC-III tutorial" + ] + }, + { + "cell_type": "markdown", + "id": "74b155b9-9ba6-455c-b987-4c261c339364", + "metadata": {}, + "source": [ + "This notebook shows examples of how to use the cyclops-query tool on [MIMIC-III v1.4](https://physionet.org/content/mimiciii/1.4/).\n", + "\n", + "Each query is limited to 100 rows (for quick results).\n", + "\n", + "* First, set up the MIMIC-III database according to the instructions in [mimic-code](https://github.com/MIT-LCP/mimic-code/tree/main/mimic-iii/buildmimic/postgres).\n", + "* The database is assumed to be hosted using PostgreSQL. Update the config parameters such as username and password, passed to `MIMICIIIQuerier` accordingly."
+ ] + }, + { + "cell_type": "markdown", + "id": "0c7c1635-3a70-42d1-99a6-c7f7a9cf9d21", + "metadata": {}, + "source": [ + "## Imports and instantiate `MIMICIIIQuerier`" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "75a140e0-fb27-4319-862f-be54397abe5c", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "\"\"\"MIMICIII tutorial.\"\"\"\n", + "\n", + "import cycquery.ops as qo\n", + "from cycquery import MIMICIIIQuerier\n", + "\n", + "\n", + "querier = MIMICIIIQuerier(\n", + " dbms=\"postgresql\",\n", + " port=5432,\n", + " host=\"localhost\",\n", + " database=\"mimiciii\",\n", + " user=\"postgres\",\n", + " password=\"pwd\",\n", + ")\n", + "# List all custom table methods.\n", + "querier.list_custom_tables()" + ] + }, + { + "cell_type": "markdown", + "id": "ea04cc11-e947-4097-91df-729f28b3732c", + "metadata": {}, + "source": [ + "## Example 1. Get all male patients with a mortality outcome." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c1efa964-8978-4a0e-9892-5ea4ce9953a3", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "ops = qo.Sequential(\n", + " qo.ConditionEquals(\"expire_flag\", 1),\n", + " qo.ConditionEquals(\"gender\", \"M\"),\n", + ")\n", + "patients = querier.mimiciii.patients()\n", + "patients = patients.ops(ops).run(limit=100)\n", + "print(f\"{len(patients)} rows extracted!\")" + ] + }, + { + "cell_type": "markdown", + "id": "cd43679b-3d33-4135-b97c-373447289a9c", + "metadata": { + "tags": [] + }, + "source": [ + "## Example 2. Get all female patient encounters with diagnoses (`gastroenteritis` in ICD-9 long title)." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a7ab5fa3-e26b-47a7-818f-1bf367a55760", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "patients = querier.mimiciii.patients()\n", + "patients = patients.ops(qo.ConditionEquals(\"gender\", \"F\"))\n", + "admissions = querier.mimiciii.admissions()\n", + "patient_admissions = patients.join(\n", + " join_table=admissions,\n", + " on=\"subject_id\",\n", + ")\n", + "diagnoses = querier.diagnoses()\n", + "diagnoses = diagnoses.ops(qo.ConditionSubstring(\"long_title\", \"gastroenteritis\"))\n", + "patient_admissions_diagnoses = patient_admissions.join(\n", + " join_table=diagnoses,\n", + " on=[\"subject_id\", \"hadm_id\"],\n", + ").run(limit=100)\n", + "print(f\"{len(patient_admissions_diagnoses)} rows extracted!\")" + ] + }, + { + "cell_type": "markdown", + "id": "241f7d10-9e04-44ae-b325-87f5a4046df2", + "metadata": {}, + "source": [ + "## Example 3. Get potassium lab tests for female patients." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "24043abc-1878-4e00-8229-36d4a0368b98", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "patients = querier.mimiciii.patients()\n", + "patients = patients.ops(qo.ConditionEquals(\"gender\", \"F\"))\n", + "labs = querier.labevents()\n", + "labs = labs.ops(qo.ConditionEquals(\"label\", \"potassium\"))\n", + "patient_labs = patients.join(labs, on=\"subject_id\").run(limit=100)\n", + "print(f\"{len(patient_labs)} rows extracted!\")" + ] + }, + { + "cell_type": "markdown", + "id": "2dbf0fdc-8d99-4fde-ab3e-7c8a67b72f8b", + "metadata": {}, + "source": [ + "## Example 4. Get AaDO2 carevue chart events for male patients that have a `valuenum` of less than 20." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f6142f27-e8d1-453c-bfe2-2265d9ff1914", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "chartevents_ops = qo.Sequential(\n", + "    qo.ConditionEquals(\"dbsource\", \"carevue\"),\n", + "    qo.ConditionEquals(\"label\", \"AaDO2\"),\n", + "    qo.ConditionLessThan(\"valuenum\", 20),\n", + ")\n", + "patients = querier.mimiciii.patients()\n", + "patients = patients.ops(qo.ConditionEquals(\"gender\", \"M\"))\n", + "chart_events = querier.chartevents()\n", + "chart_events = chart_events.ops(chartevents_ops)\n", + "patient_chart_events = patients.join(chart_events, on=\"subject_id\").run(limit=100)\n", + "print(f\"{len(patient_chart_events)} rows extracted!\")" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.12" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/docs/source/tutorials/mimiciv.ipynb b/docs/source/tutorials/mimiciv.ipynb new file mode 100644 index 0000000..4a9048f --- /dev/null +++ b/docs/source/tutorials/mimiciv.ipynb @@ -0,0 +1,341 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "e337389b-1cfe-4796-a846-b4e1ba5690d6", + "metadata": {}, + "source": [ + "# MIMIC-IV tutorial" + ] + }, + { + "cell_type": "markdown", + "id": "1efffc64", + "metadata": {}, + "source": [ + "This notebook shows examples of how to use the cyclops-query tool on [MIMIC-IV v2.0](https://physionet.org/content/mimiciv/2.0/).\n", + "\n", + "Each query is limited to 100 rows (for quick results).\n", + "\n", + "* First, set up the MIMIC-IV database according to the instructions in [mimic-code](https://github.com/MIT-LCP/mimic-code/tree/main/mimic-iv/buildmimic/postgres).\n", + "* The database is assumed to be hosted using PostgreSQL. Update the config parameters such as username and password, passed to `MIMICIVQuerier` accordingly." + ] + }, + { + "cell_type": "markdown", + "id": "12c18656-7f16-4230-85d0-944563d6a13e", + "metadata": {}, + "source": [ + "## Imports and instantiate `MIMICIVQuerier`" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "53009e6b", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "\"\"\"MIMICIV tutorial.\"\"\"\n", + "\n", + "import cycquery.ops as qo\n", + "from cycquery import MIMICIVQuerier\n", + "\n", + "\n", + "querier = MIMICIVQuerier(\n", + "    dbms=\"postgresql\",\n", + "    port=5432,\n", + "    host=\"localhost\",\n", + "    database=\"mimiciv-2.0\",\n", + "    user=\"postgres\",\n", + "    password=\"pwd\",\n", + ")\n", + "# List all schemas.\n", + "querier.list_schemas()" + ] + }, + { + "cell_type": "markdown", + "id": "7b6214f8", + "metadata": { + "tags": [] + }, + "source": [ + "## Example 1. 
Get all patient admissions from 2021 or later (approximate year of admission)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "cdfadaa4-6fd6-4fd7-85cf-e012aa0799e1", + "metadata": {}, + "outputs": [], + "source": [ + "patients = querier.patients()\n", + "admissions = querier.mimiciv_hosp.admissions()\n", + "patient_admissions = patients.join(admissions, on=\"subject_id\")\n", + "ops = qo.Sequential(\n", + "    qo.AddDeltaColumn([\"admittime\", \"dischtime\"], years=\"anchor_year_difference\"),\n", + "    qo.ConditionAfterDate(\"admittime\", \"2021-01-01\"),\n", + ")\n", + "patient_admissions = patient_admissions.ops(ops).run(limit=100)\n", + "print(f\"{len(patient_admissions)} rows extracted!\")" + ] + }, + { + "cell_type": "markdown", + "id": "80d9f06e", + "metadata": {}, + "source": [ + "## Example 2. Get all patient encounters with diagnoses (`schizophrenia` in ICD-10 long title), in the year 2015." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a89a9cf0", + "metadata": {}, + "outputs": [], + "source": [ + "patients = querier.patients()\n", + "admissions = querier.mimiciv_hosp.admissions()\n", + "patient_admissions = patients.join(admissions, on=\"subject_id\")\n", + "ops = qo.Sequential(\n", + "    qo.AddDeltaColumn([\"admittime\", \"dischtime\"], years=\"anchor_year_difference\"),\n", + "    qo.ConditionInYears(\"admittime\", \"2015\"),\n", + ")\n", + "patient_admissions = patient_admissions.ops(ops)\n", + "diagnoses = querier.diagnoses()\n", + "diagnoses_ops = qo.Sequential(\n", + "    qo.ConditionEquals(\"icd_version\", 10),\n", + "    qo.ConditionSubstring(\"long_title\", \"schizophrenia\"),\n", + ")\n", + "diagnoses = diagnoses.ops(diagnoses_ops)\n", + "patient_admissions_diagnoses = patient_admissions.join(\n", + "    join_table=diagnoses,\n", + "    on=[\"subject_id\", \"hadm_id\"],\n", + ").run(limit=100)\n", + "print(f\"{len(patient_admissions_diagnoses)} rows extracted!\")" + ] + }, + { + "cell_type": "markdown", + "id": "e2baea54", + "metadata": {}, + "source": [ + "## Example 3. Advanced: uses `ConditionRegexMatch` from `cycquery.ops`. Get all patient encounters with diagnoses (ICD-9 long title contains `schizophrenia` and `chronic`), in the year 2015." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "03936cee", + "metadata": {}, + "outputs": [], + "source": [ + "patients = querier.patients()\n", + "admissions = querier.mimiciv_hosp.admissions()\n", + "patient_admissions = patients.join(admissions, on=\"subject_id\")\n", + "ops = qo.Sequential(\n", + "    qo.AddDeltaColumn([\"admittime\", \"dischtime\"], years=\"anchor_year_difference\"),\n", + "    qo.ConditionInYears(\"admittime\", \"2015\"),\n", + ")\n", + "patient_admissions = patient_admissions.ops(ops)\n", + "diagnoses = querier.diagnoses()\n", + "diagnoses_ops = qo.Sequential(\n", + "    qo.ConditionEquals(\"icd_version\", 9),\n", + "    qo.ConditionRegexMatch(\"long_title\", r\"(?=.*schizophrenia)(?=.*chronic)\"),\n", + ")\n", + "diagnoses = diagnoses.ops(diagnoses_ops)\n", + "patient_admissions_diagnoses = patient_admissions.join(\n", + "    join_table=diagnoses,\n", + "    on=[\"subject_id\", \"hadm_id\"],\n", + ").run(limit=100)\n", + "print(f\"{len(patient_admissions_diagnoses)} rows extracted!\")" + ] + }, + { + "cell_type": "markdown", + "id": "30b0d604", + "metadata": {}, + "source": [ + "## Example 4. Get routine vital signs for patients from the year 2015."
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "56a72377", + "metadata": {}, + "outputs": [], + "source": [ + "patients = querier.patients()\n", + "admissions = querier.mimiciv_hosp.admissions()\n", + "patient_admissions = patients.join(admissions, on=\"subject_id\")\n", + "ops = qo.Sequential(\n", + "    qo.AddDeltaColumn([\"admittime\", \"dischtime\"], years=\"anchor_year_difference\"),\n", + "    qo.ConditionInYears(\"admittime\", \"2015\"),\n", + ")\n", + "patient_admissions = patient_admissions.ops(ops)\n", + "chart_events = querier.chartevents()\n", + "vitals = chart_events.ops(qo.ConditionEquals(\"category\", \"Routine Vital Signs\"))\n", + "patient_admissions_vitals = patient_admissions.join(\n", + "    join_table=vitals,\n", + "    on=[\"subject_id\", \"hadm_id\"],\n", + ").run(limit=100)\n", + "print(f\"{len(patient_admissions_vitals)} rows extracted!\")" + ] + }, + { + "cell_type": "markdown", + "id": "621479f0", + "metadata": {}, + "source": [ + "## Example 5. Get hemoglobin lab tests for patients from the year 2009." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "bce11f81", + "metadata": {}, + "outputs": [], + "source": [ + "patients = querier.patients()\n", + "admissions = querier.mimiciv_hosp.admissions()\n", + "patient_admissions = patients.join(admissions, on=\"subject_id\")\n", + "ops = qo.Sequential(\n", + "    qo.AddDeltaColumn([\"admittime\", \"dischtime\"], years=\"anchor_year_difference\"),\n", + "    qo.ConditionInYears(\"admittime\", \"2009\"),\n", + ")\n", + "patient_admissions = patient_admissions.ops(ops)\n", + "chart_events = querier.chartevents()\n", + "labs = chart_events.ops(qo.ConditionEquals(\"label\", \"hemoglobin\"))\n", + "patient_admissions_labs = patient_admissions.join(\n", + "    join_table=labs,\n", + "    on=[\"subject_id\", \"hadm_id\"],\n", + ").run(limit=100)\n", + "print(f\"{len(patient_admissions_labs)} rows extracted!\")" + ] + }, + { + "cell_type": "markdown", + "id": "b0a9bc12-dda3-4445-9156-52d295a1c48f", + "metadata": {}, + "source": [ + "## Example 6. Get radiology reports from the year 2009 and filter on the keywords `lymphadenopathy` and `infectious` occurring together." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f00d270c-d78f-4dc0-8dae-ff4d52958c8b", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "patients = querier.patients()\n", + "admissions = querier.mimiciv_hosp.admissions()\n", + "patient_admissions = patients.join(admissions, on=\"subject_id\")\n", + "ops = qo.Sequential(\n", + "    qo.AddDeltaColumn([\"admittime\", \"dischtime\"], years=\"anchor_year_difference\"),\n", + "    qo.ConditionInYears(\"admittime\", \"2009\"),\n", + ")\n", + "patient_admissions = patient_admissions.ops(ops)\n", + "radiology_notes = querier.mimiciv_note.radiology()\n", + "radiology_notes_ops = qo.Sequential(\n", + "    qo.And(\n", + "        qo.ConditionLike(\"text\", \"% lymphadenopathy %\"),\n", + "        qo.ConditionLike(\"text\", \"% infectious %\"),\n", + "    ),\n", + ")\n", + "radiology_notes = radiology_notes.ops(radiology_notes_ops)\n", + "patient_admissions_radiology_notes = patient_admissions.join(\n", + "    join_table=radiology_notes,\n", + "    on=[\"subject_id\", \"hadm_id\"],\n", + ").run(limit=100)\n", + "print(f\"{len(patient_admissions_radiology_notes)} rows extracted!\")" + ] + }, + { + "cell_type": "markdown", + "id": "9550ba62-c128-4259-a075-0cbd9c70b662", + "metadata": {}, + "source": [ + "## Example 7. 
Get all female patient encounters from the year 2015, and return as a dask dataframe (lazy evaluation) with 4 partitions (batches) partitioned on `subject_id`." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "28683d70-376e-4d9b-883d-1a7de634e455", + "metadata": {}, + "outputs": [], + "source": [ + "patients = querier.patients()\n", + "admissions = querier.mimiciv_hosp.admissions()\n", + "patient_admissions = patients.join(admissions, on=\"subject_id\")\n", + "ops = qo.Sequential(\n", + "    qo.AddDeltaColumn([\"admittime\", \"dischtime\"], years=\"anchor_year_difference\"),\n", + "    qo.ConditionInYears(\"admittime\", \"2015\"),\n", + "    qo.Cast(\"gender\", \"str\"),\n", + "    qo.ConditionEquals(\"gender\", \"F\"),\n", + ")\n", + "patient_admissions = patient_admissions.ops(ops)\n", + "patient_admissions = patient_admissions.run(\n", + "    backend=\"dask\",\n", + "    index_col=\"subject_id\",\n", + "    n_partitions=4,\n", + ")\n", + "print(f\"{len(patient_admissions)} rows extracted!\")\n", + "print(f\"Return type: {type(patient_admissions)}\")\n", + "print(f\"Number of partitions: {patient_admissions.npartitions}\")" + ] + }, + { + "cell_type": "markdown", + "id": "e1ed2708", + "metadata": {}, + "source": [ + "## Example 8. Running a raw SQL string." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a853deec", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "data = querier.db.run_query(\"SELECT * FROM mimiciv_hosp.admissions LIMIT 100\")\n", + "print(f\"{len(data)} rows extracted!\")" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.12" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/docs/source/tutorials/omop.ipynb b/docs/source/tutorials/omop.ipynb new file mode 100644 index 0000000..16b7928 --- /dev/null +++ b/docs/source/tutorials/omop.ipynb @@ -0,0 +1,288 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "e337389b-1cfe-4796-a846-b4e1ba5690d6", + "metadata": {}, + "source": [ + "# OMOP tutorial" + ] + }, + { + "cell_type": "markdown", + "id": "1efffc64", + "metadata": {}, + "source": [ + "This notebook shows examples of how to use the cyclops-query tool to query EHR databases that follow the OMOP common data model. Each query is limited to 100 rows (for quick results).\n", + "\n", + "We showcase the examples on:\n", + "\n", + "1. [Synthea](https://github.com/synthetichealth/synthea) in OMOP format.\n", + "\n", + "    * First, generate Synthea data using their releases. We used [v2.7.0](https://github.com/synthetichealth/synthea/releases/tag/v2.7.0) to generate the data.\n", + "    * Follow the instructions provided in [ETL-Synthea](https://github.com/OHDSI/ETL-Synthea) to load the CSV data into a PostgreSQL database, and perform the ETL to load the data into OMOP format." + ] + }, + { + "cell_type": "markdown", + "id": "12c18656-7f16-4230-85d0-944563d6a13e", + "metadata": {}, + "source": [ + "## Imports and instantiate `OMOPQuerier`." + ] + }, + { + "cell_type": "markdown", + "id": "c4cfec2d-af3f-431d-a851-c8853e31df08", + "metadata": {}, + "source": [ + "Pass in `schema_name`, which is the name of the PostgreSQL schema that houses all the OMOP tables."
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "53009e6b", + "metadata": {}, + "outputs": [], + "source": [ + "\"\"\"OMOP tutorial.\"\"\"\n", + "\n", + "import pandas as pd\n", + "\n", + "import cycquery.ops as qo\n", + "from cycquery import OMOPQuerier\n", + "\n", + "\n", + "querier = OMOPQuerier(\n", + "    dbms=\"postgresql\",\n", + "    port=5432,\n", + "    host=\"localhost\",\n", + "    database=\"synthea_integration_test\",\n", + "    user=\"postgres\",\n", + "    password=\"pwd\",\n", + "    schema_name=\"cdm_synthea10\",\n", + ")\n", + "# List all tables.\n", + "querier.list_tables(\"cdm_synthea10\")" + ] + }, + { + "cell_type": "markdown", + "id": "7b6214f8", + "metadata": { + "tags": [] + }, + "source": [ + "## Example 1. Get all patient visits in or after 2010." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "3a3d9cb9-fe40-45b8-ba2f-8de52a3b7f4f", + "metadata": {}, + "outputs": [], + "source": [ + "visits = querier.visit_occurrence()\n", + "visits = visits.ops(qo.ConditionAfterDate(\"visit_start_date\", \"2010-01-01\"))\n", + "visits = visits.run(limit=100)\n", + "print(f\"{len(visits)} rows extracted!\")\n", + "pd.to_datetime(visits[\"visit_start_date\"]).dt.year.value_counts().sort_index()" + ] + }, + { + "cell_type": "markdown", + "id": "fcaea674-b967-4fbc-a7be-4d8b4492ef56", + "metadata": {}, + "source": [ + "## Example 2. Get measurements for all visits in or after 2020." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "030e2491-a7cc-42f3-a1ca-618212b3524c", + "metadata": {}, + "outputs": [], + "source": [ + "visits = querier.visit_occurrence()\n", + "visits = visits.ops(qo.ConditionAfterDate(\"visit_start_date\", \"2020-01-01\"))\n", + "measurements = querier.measurement()\n", + "visits_measurements = visits.join(\n", + "    join_table=measurements,\n", + "    on=\"visit_occurrence_id\",\n", + ").run(limit=100)\n", + "print(f\"{len(visits_measurements)} rows extracted!\")" + ] + }, + { + "cell_type": "markdown", + "id": "602273d9-8d78-4e69-8566-7f9d04553d3a", + "metadata": { + "tags": [] + }, + "source": [ + "2. [MIMIC-III v1.4](https://physionet.org/content/mimiciii/1.4/) in OMOP format.\n", + "\n", + "* First, set up the MIMIC-III database according to the instructions in [mimic-code](https://github.com/MIT-LCP/mimic-code/tree/main/mimic-iii/buildmimic/postgres).\n", + "* Perform the ETL in the [mimic-omop](https://github.com/MIT-LCP/mimic-omop) repo.\n", + "* The database is assumed to be hosted using PostgreSQL. Update the config parameters such as username and password, passed to `OMOPQuerier` accordingly." + ] + }, + { + "cell_type": "markdown", + "id": "10da36a2-28e2-4975-83c7-68d0d366e459", + "metadata": {}, + "source": [ + "## Imports and instantiate `OMOPQuerier`." + ] + }, + { + "cell_type": "markdown", + "id": "4a050fa9-442b-44dc-b241-86e73516e693", + "metadata": {}, + "source": [ + "Pass in `schema_name`, which is the name of the PostgreSQL schema that houses all the OMOP tables."
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "0622b3df-2864-4f32-bd98-806019f59c50", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "querier = OMOPQuerier(\n", + "    dbms=\"postgresql\",\n", + "    port=5432,\n", + "    host=\"localhost\",\n", + "    database=\"mimiciii\",\n", + "    user=\"postgres\",\n", + "    password=\"pwd\",\n", + "    schema_name=\"omop\",\n", + ")\n", + "# List all schemas.\n", + "querier.list_schemas()" + ] + }, + { + "cell_type": "markdown", + "id": "c4967160-bd45-4ce0-8b00-27f4f9742c68", + "metadata": {}, + "source": [ + "## Example 1. Get all patient visits that ended in a mortality outcome in or after 2010." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "40ff2e83-75e4-4119-aa33-26f95e63ddaa", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "visits = querier.visit_occurrence()\n", + "visits = visits.ops(qo.ConditionAfterDate(\"visit_start_date\", \"2010-01-01\"))\n", + "visits_concept_mapped = querier.map_concept_ids_to_name(\n", + "    visits,\n", + "    [\n", + "        \"discharge_to_concept_id\",\n", + "        \"admitting_concept_id\",\n", + "    ],\n", + ")\n", + "visits_concept_mapped_died = visits_concept_mapped.ops(\n", + "    qo.ConditionSubstring(\"discharge_to_concept_name\", \"died\"),\n", + ").run()\n", + "print(f\"{len(visits_concept_mapped_died)} rows extracted!\")" + ] + }, + { + "cell_type": "markdown", + "id": "996ad6ff-8213-43ea-b701-e52a1d0b01bf", + "metadata": { + "tags": [] + }, + "source": [ + "## Example 2. Get all measurements for female patient visits with `sepsis` diagnoses that ended in a mortality outcome." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "46fd771c-5da7-4bce-aec7-08a5210a069b", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "persons = querier.person()\n", + "persons = persons.ops(qo.ConditionSubstring(\"gender_concept_name\", \"FEMALE\"))\n", + "visits = querier.visit_occurrence()\n", + "person_visits = persons.join(visits, on=\"person_id\")\n", + "conditions = querier.omop.condition_occurrence()\n", + "person_visits_conditions = person_visits.join(\n", + "    conditions,\n", + "    on=\"visit_occurrence_id\",\n", + "    isouter=True,\n", + ")\n", + "measurement = querier.measurement()\n", + "person_visits_conditions_measurements = person_visits_conditions.join(\n", + "    measurement,\n", + "    on=\"visit_occurrence_id\",\n", + "    isouter=True,\n", + ")\n", + "person_visits_conditions_measurements = querier.map_concept_ids_to_name(\n", + "    person_visits_conditions_measurements,\n", + "    [\n", + "        \"discharge_to_concept_id\",\n", + "        \"admitting_concept_id\",\n", + "        \"condition_concept_id\",\n", + "    ],\n", + ")\n", + "ops = qo.Sequential(\n", + "    qo.ConditionSubstring(\"discharge_to_concept_name\", \"died\"),\n", + "    qo.ConditionSubstring(\"condition_concept_name\", \"sepsis\"),\n", + ")\n", + "cohort = person_visits_conditions_measurements.ops(ops).run(limit=100)\n", + "print(f\"{len(cohort)} rows extracted!\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d20a2581-f613-4ab8-9feb-3e84b8835db1", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "cohort[\"measurement_concept_name\"].value_counts()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", 
"nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.12" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/docs/source/tutorials_query.rst b/docs/source/tutorials_query.rst new file mode 100644 index 0000000..6f37e08 --- /dev/null +++ b/docs/source/tutorials_query.rst @@ -0,0 +1,16 @@ +query API +========= + +The query API allows you to query EHR databases. It is a Python API that +that communicates with postgresql databases. It is a wrapper around the +SQLAlchemy ORM and uses SQLAlchemy query objects and functions to build +queries. + + +.. toctree:: + + tutorials/mimiciii.ipynb + tutorials/mimiciv.ipynb + tutorials/eicu.ipynb + tutorials/omop.ipynb + tutorials/gemini.ipynb diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 0000000..260df71 --- /dev/null +++ b/poetry.lock @@ -0,0 +1,3055 @@ +# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. + +[[package]] +name = "alabaster" +version = "0.7.13" +description = "A configurable sidebar-enabled Sphinx theme" +optional = false +python-versions = ">=3.6" +files = [ + {file = "alabaster-0.7.13-py3-none-any.whl", hash = "sha256:1ee19aca801bbabb5ba3f5f258e4422dfa86f82f3e9cefb0859b283cdd7f62a3"}, + {file = "alabaster-0.7.13.tar.gz", hash = "sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2"}, +] + +[[package]] +name = "anyascii" +version = "0.3.2" +description = "Unicode to ASCII transliteration" +optional = false +python-versions = ">=3.3" +files = [ + {file = "anyascii-0.3.2-py3-none-any.whl", hash = "sha256:3b3beef6fc43d9036d3b0529050b0c48bfad8bc960e9e562d7223cfb94fe45d4"}, + {file = "anyascii-0.3.2.tar.gz", hash = "sha256:9d5d32ef844fe225b8bc7cba7f950534fae4da27a9bf3a6bea2cb0ea46ce4730"}, +] + +[[package]] +name = "appnope" +version = "0.1.3" +description = "Disable App Nap on macOS >= 10.9" +optional = false +python-versions = "*" +files = [ + {file = "appnope-0.1.3-py2.py3-none-any.whl", hash = "sha256:265a455292d0bd8a72453494fa24df5a11eb18373a60c7c0430889f22548605e"}, + {file = "appnope-0.1.3.tar.gz", hash = "sha256:02bd91c4de869fbb1e1c50aafc4098827a7a54ab2f39d9dcba6c9547ed920e24"}, +] + +[[package]] +name = "astroid" +version = "3.0.1" +description = "An abstract syntax tree for Python with inference support." 
+optional = false +python-versions = ">=3.8.0" +files = [ + {file = "astroid-3.0.1-py3-none-any.whl", hash = "sha256:7d5895c9825e18079c5aeac0572bc2e4c83205c95d416e0b4fee8bc361d2d9ca"}, + {file = "astroid-3.0.1.tar.gz", hash = "sha256:86b0bb7d7da0be1a7c4aedb7974e391b32d4ed89e33de6ed6902b4b15c97577e"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} + +[[package]] +name = "asttokens" +version = "2.4.0" +description = "Annotate AST trees with source code positions" +optional = false +python-versions = "*" +files = [ + {file = "asttokens-2.4.0-py2.py3-none-any.whl", hash = "sha256:cf8fc9e61a86461aa9fb161a14a0841a03c405fa829ac6b202670b3495d2ce69"}, + {file = "asttokens-2.4.0.tar.gz", hash = "sha256:2e0171b991b2c959acc6c49318049236844a5da1d65ba2672c4880c1c894834e"}, +] + +[package.dependencies] +six = ">=1.12.0" + +[package.extras] +test = ["astroid", "pytest"] + +[[package]] +name = "attrs" +version = "23.1.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, + {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[docs,tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] + +[[package]] +name = "autopep8" +version = "2.0.4" +description = "A tool that automatically formats Python code to conform to the PEP 8 style guide" +optional = false +python-versions = ">=3.6" +files = [ + {file = "autopep8-2.0.4-py2.py3-none-any.whl", hash = "sha256:067959ca4a07b24dbd5345efa8325f5f58da4298dab0dde0443d5ed765de80cb"}, + {file = "autopep8-2.0.4.tar.gz", hash = "sha256:2913064abd97b3419d1cc83ea71f042cb821f87e45b9c88cad5ad3c4ea87fe0c"}, +] + +[package.dependencies] +pycodestyle = ">=2.10.0" +tomli = {version = "*", markers = "python_version < \"3.11\""} + +[[package]] +name = "babel" +version = "2.13.0" +description = "Internationalization utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "Babel-2.13.0-py3-none-any.whl", hash = "sha256:fbfcae1575ff78e26c7449136f1abbefc3c13ce542eeb13d43d50d8b047216ec"}, + {file = "Babel-2.13.0.tar.gz", hash = "sha256:04c3e2d28d2b7681644508f836be388ae49e0cfe91465095340395b60d00f210"}, +] + +[package.extras] +dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] + +[[package]] +name = "backcall" +version = "0.2.0" +description = "Specifications for callback functions passed in to an API" +optional = false +python-versions = "*" +files = [ + {file = "backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"}, + {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"}, +] + +[[package]] +name = "beautifulsoup4" +version = "4.12.2" +description = "Screen-scraping library" +optional = false +python-versions = ">=3.6.0" +files = [ + {file = "beautifulsoup4-4.12.2-py3-none-any.whl", hash = "sha256:bd2520ca0d9d7d12694a53d44ac482d181b4ec1888909b035a3dbf40d0f57d4a"}, + 
{file = "beautifulsoup4-4.12.2.tar.gz", hash = "sha256:492bbc69dca35d12daac71c4db1bfff0c876c00ef4a2ffacce226d4638eb72da"}, +] + +[package.dependencies] +soupsieve = ">1.2" + +[package.extras] +html5lib = ["html5lib"] +lxml = ["lxml"] + +[[package]] +name = "black" +version = "22.12.0" +description = "The uncompromising code formatter." +optional = false +python-versions = ">=3.7" +files = [ + {file = "black-22.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eedd20838bd5d75b80c9f5487dbcb06836a43833a37846cf1d8c1cc01cef59d"}, + {file = "black-22.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:159a46a4947f73387b4d83e87ea006dbb2337eab6c879620a3ba52699b1f4351"}, + {file = "black-22.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d30b212bffeb1e252b31dd269dfae69dd17e06d92b87ad26e23890f3efea366f"}, + {file = "black-22.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:7412e75863aa5c5411886804678b7d083c7c28421210180d67dfd8cf1221e1f4"}, + {file = "black-22.12.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c116eed0efb9ff870ded8b62fe9f28dd61ef6e9ddd28d83d7d264a38417dcee2"}, + {file = "black-22.12.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1f58cbe16dfe8c12b7434e50ff889fa479072096d79f0a7f25e4ab8e94cd8350"}, + {file = "black-22.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77d86c9f3db9b1bf6761244bc0b3572a546f5fe37917a044e02f3166d5aafa7d"}, + {file = "black-22.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:82d9fe8fee3401e02e79767016b4907820a7dc28d70d137eb397b92ef3cc5bfc"}, + {file = "black-22.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:101c69b23df9b44247bd88e1d7e90154336ac4992502d4197bdac35dd7ee3320"}, + {file = "black-22.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:559c7a1ba9a006226f09e4916060982fd27334ae1998e7a38b3f33a37f7a2148"}, + {file = "black-22.12.0-py3-none-any.whl", hash = "sha256:436cc9167dd28040ad90d3b404aec22cedf24a6e4d7de221bec2730ec0c97bcf"}, + {file = "black-22.12.0.tar.gz", hash = "sha256:229351e5a18ca30f447bf724d007f890f97e13af070bb6ad4c0a441cd7596a2f"}, +] + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +pathspec = ">=0.9.0" +platformdirs = ">=2" +tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""} +typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.7.4)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + +[[package]] +name = "blacken-docs" +version = "1.16.0" +description = "Run Black on Python code blocks in documentation files." +optional = false +python-versions = ">=3.8" +files = [ + {file = "blacken_docs-1.16.0-py3-none-any.whl", hash = "sha256:b0dcb84b28ebfb352a2539202d396f50e15a54211e204a8005798f1d1edb7df8"}, + {file = "blacken_docs-1.16.0.tar.gz", hash = "sha256:b4bdc3f3d73898dfbf0166f292c6ccfe343e65fc22ddef5319c95d1a8dcc6c1c"}, +] + +[package.dependencies] +black = ">=22.1.0" + +[[package]] +name = "bleach" +version = "6.1.0" +description = "An easy safelist-based HTML-sanitizing tool." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "bleach-6.1.0-py3-none-any.whl", hash = "sha256:3225f354cfc436b9789c66c4ee030194bee0568fbf9cbdad3bc8b5c26c5f12b6"}, + {file = "bleach-6.1.0.tar.gz", hash = "sha256:0a31f1837963c41d46bbf1331b8778e1308ea0791db03cc4e7357b97cf42a8fe"}, +] + +[package.dependencies] +six = ">=1.9.0" +webencodings = "*" + +[package.extras] +css = ["tinycss2 (>=1.1.0,<1.3)"] + +[[package]] +name = "certifi" +version = "2023.7.22" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"}, + {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"}, +] + +[[package]] +name = "cffi" +version = "1.16.0" +description = "Foreign Function Interface for Python calling C code." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, + {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, + {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, + {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, + {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, + {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, + {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, + {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, + {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, + {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, + {file = 
"cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, + {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, + {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, + {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, + {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, +] + +[package.dependencies] +pycparser = "*" + +[[package]] +name = "cfgv" +version = "3.4.0" +description = "Validate configuration and produce human readable error messages." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, + {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.0" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.0.tar.gz", hash = "sha256:63563193aec44bce707e0c5ca64ff69fa72ed7cf34ce6e11d5127555756fd2f6"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:effe5406c9bd748a871dbcaf3ac69167c38d72db8c9baf3ff954c344f31c4cbe"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4162918ef3098851fcd8a628bf9b6a98d10c380725df9e04caf5ca6dd48c847a"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0570d21da019941634a531444364f2482e8db0b3425fcd5ac0c36565a64142c8"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5707a746c6083a3a74b46b3a631d78d129edab06195a92a8ece755aac25a3f3d"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:278c296c6f96fa686d74eb449ea1697f3c03dc28b75f873b65b5201806346a69"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a4b71f4d1765639372a3b32d2638197f5cd5221b19531f9245fcc9ee62d38f56"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5969baeaea61c97efa706b9b107dcba02784b1601c74ac84f2a532ea079403e"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3f93dab657839dfa61025056606600a11d0b696d79386f974e459a3fbc568ec"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:db756e48f9c5c607b5e33dd36b1d5872d0422e960145b08ab0ec7fd420e9d649"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:232ac332403e37e4a03d209a3f92ed9071f7d3dbda70e2a5e9cff1c4ba9f0678"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e5c1502d4ace69a179305abb3f0bb6141cbe4714bc9b31d427329a95acfc8bdd"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:2502dd2a736c879c0f0d3e2161e74d9907231e25d35794584b1ca5284e43f596"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23e8565ab7ff33218530bc817922fae827420f143479b753104ab801145b1d5b"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-win32.whl", hash = "sha256:1872d01ac8c618a8da634e232f24793883d6e456a66593135aeafe3784b0848d"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:557b21a44ceac6c6b9773bc65aa1b4cc3e248a5ad2f5b914b91579a32e22204d"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d7eff0f27edc5afa9e405f7165f85a6d782d308f3b6b9d96016c010597958e63"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6a685067d05e46641d5d1623d7c7fdf15a357546cbb2f71b0ebde91b175ffc3e"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0d3d5b7db9ed8a2b11a774db2bbea7ba1884430a205dbd54a32d61d7c2a190fa"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2935ffc78db9645cb2086c2f8f4cfd23d9b73cc0dc80334bc30aac6f03f68f8c"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fe359b2e3a7729010060fbca442ca225280c16e923b37db0e955ac2a2b72a05"}, + {file = 
"charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:380c4bde80bce25c6e4f77b19386f5ec9db230df9f2f2ac1e5ad7af2caa70459"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0d1e3732768fecb052d90d62b220af62ead5748ac51ef61e7b32c266cac9293"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1b2919306936ac6efb3aed1fbf81039f7087ddadb3160882a57ee2ff74fd2382"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f8888e31e3a85943743f8fc15e71536bda1c81d5aa36d014a3c0c44481d7db6e"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:82eb849f085624f6a607538ee7b83a6d8126df6d2f7d3b319cb837b289123078"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7b8b8bf1189b3ba9b8de5c8db4d541b406611a71a955bbbd7385bbc45fcb786c"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5adf257bd58c1b8632046bbe43ee38c04e1038e9d37de9c57a94d6bd6ce5da34"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c350354efb159b8767a6244c166f66e67506e06c8924ed74669b2c70bc8735b1"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-win32.whl", hash = "sha256:02af06682e3590ab952599fbadac535ede5d60d78848e555aa58d0c0abbde786"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:86d1f65ac145e2c9ed71d8ffb1905e9bba3a91ae29ba55b4c46ae6fc31d7c0d4"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:3b447982ad46348c02cb90d230b75ac34e9886273df3a93eec0539308a6296d7"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:abf0d9f45ea5fb95051c8bfe43cb40cda383772f7e5023a83cc481ca2604d74e"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b09719a17a2301178fac4470d54b1680b18a5048b481cb8890e1ef820cb80455"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b3d9b48ee6e3967b7901c052b670c7dda6deb812c309439adaffdec55c6d7b78"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:edfe077ab09442d4ef3c52cb1f9dab89bff02f4524afc0acf2d46be17dc479f5"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3debd1150027933210c2fc321527c2299118aa929c2f5a0a80ab6953e3bd1908"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86f63face3a527284f7bb8a9d4f78988e3c06823f7bea2bd6f0e0e9298ca0403"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:24817cb02cbef7cd499f7c9a2735286b4782bd47a5b3516a0e84c50eab44b98e"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c71f16da1ed8949774ef79f4a0260d28b83b3a50c6576f8f4f0288d109777989"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:9cf3126b85822c4e53aa28c7ec9869b924d6fcfb76e77a45c44b83d91afd74f9"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:b3b2316b25644b23b54a6f6401074cebcecd1244c0b8e80111c9a3f1c8e83d65"}, + {file = 
"charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:03680bb39035fbcffe828eae9c3f8afc0428c91d38e7d61aa992ef7a59fb120e"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4cc152c5dd831641e995764f9f0b6589519f6f5123258ccaca8c6d34572fefa8"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-win32.whl", hash = "sha256:b8f3307af845803fb0b060ab76cf6dd3a13adc15b6b451f54281d25911eb92df"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:8eaf82f0eccd1505cf39a45a6bd0a8cf1c70dcfc30dba338207a969d91b965c0"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dc45229747b67ffc441b3de2f3ae5e62877a282ea828a5bdb67883c4ee4a8810"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f4a0033ce9a76e391542c182f0d48d084855b5fcba5010f707c8e8c34663d77"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ada214c6fa40f8d800e575de6b91a40d0548139e5dc457d2ebb61470abf50186"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b1121de0e9d6e6ca08289583d7491e7fcb18a439305b34a30b20d8215922d43c"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1063da2c85b95f2d1a430f1c33b55c9c17ffaf5e612e10aeaad641c55a9e2b9d"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:70f1d09c0d7748b73290b29219e854b3207aea922f839437870d8cc2168e31cc"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:250c9eb0f4600361dd80d46112213dff2286231d92d3e52af1e5a6083d10cad9"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:750b446b2ffce1739e8578576092179160f6d26bd5e23eb1789c4d64d5af7dc7"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:fc52b79d83a3fe3a360902d3f5d79073a993597d48114c29485e9431092905d8"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:588245972aca710b5b68802c8cad9edaa98589b1b42ad2b53accd6910dad3545"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e39c7eb31e3f5b1f88caff88bcff1b7f8334975b46f6ac6e9fc725d829bc35d4"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-win32.whl", hash = "sha256:abecce40dfebbfa6abf8e324e1860092eeca6f7375c8c4e655a8afb61af58f2c"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:24a91a981f185721542a0b7c92e9054b7ab4fea0508a795846bc5b0abf8118d4"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:67b8cc9574bb518ec76dc8e705d4c39ae78bb96237cb533edac149352c1f39fe"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ac71b2977fb90c35d41c9453116e283fac47bb9096ad917b8819ca8b943abecd"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3ae38d325b512f63f8da31f826e6cb6c367336f95e418137286ba362925c877e"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:542da1178c1c6af8873e143910e2269add130a299c9106eef2594e15dae5e482"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:30a85aed0b864ac88309b7d94be09f6046c834ef60762a8833b660139cfbad13"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aae32c93e0f64469f74ccc730a7cb21c7610af3a775157e50bbd38f816536b38"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15b26ddf78d57f1d143bdf32e820fd8935d36abe8a25eb9ec0b5a71c82eb3895"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f5d10bae5d78e4551b7be7a9b29643a95aded9d0f602aa2ba584f0388e7a557"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:249c6470a2b60935bafd1d1d13cd613f8cd8388d53461c67397ee6a0f5dce741"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c5a74c359b2d47d26cdbbc7845e9662d6b08a1e915eb015d044729e92e7050b7"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:b5bcf60a228acae568e9911f410f9d9e0d43197d030ae5799e20dca8df588287"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:187d18082694a29005ba2944c882344b6748d5be69e3a89bf3cc9d878e548d5a"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:81bf654678e575403736b85ba3a7867e31c2c30a69bc57fe88e3ace52fb17b89"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-win32.whl", hash = "sha256:85a32721ddde63c9df9ebb0d2045b9691d9750cb139c161c80e500d210f5e26e"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:468d2a840567b13a590e67dd276c570f8de00ed767ecc611994c301d0f8c014f"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e0fc42822278451bc13a2e8626cf2218ba570f27856b536e00cfa53099724828"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:09c77f964f351a7369cc343911e0df63e762e42bac24cd7d18525961c81754f4"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:12ebea541c44fdc88ccb794a13fe861cc5e35d64ed689513a5c03d05b53b7c82"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:805dfea4ca10411a5296bcc75638017215a93ffb584c9e344731eef0dcfb026a"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:96c2b49eb6a72c0e4991d62406e365d87067ca14c1a729a870d22354e6f68115"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aaf7b34c5bc56b38c931a54f7952f1ff0ae77a2e82496583b247f7c969eb1479"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:619d1c96099be5823db34fe89e2582b336b5b074a7f47f819d6b3a57ff7bdb86"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0ac5e7015a5920cfce654c06618ec40c33e12801711da6b4258af59a8eff00a"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:93aa7eef6ee71c629b51ef873991d6911b906d7312c6e8e99790c0f33c576f89"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7966951325782121e67c81299a031f4c115615e68046f79b85856b86ebffc4cd"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:02673e456dc5ab13659f85196c534dc596d4ef260e4d86e856c3b2773ce09843"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:c2af80fb58f0f24b3f3adcb9148e6203fa67dd3f61c4af146ecad033024dde43"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:153e7b6e724761741e0974fc4dcd406d35ba70b92bfe3fedcb497226c93b9da7"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-win32.whl", hash = "sha256:d47ecf253780c90ee181d4d871cd655a789da937454045b17b5798da9393901a"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:d97d85fa63f315a8bdaba2af9a6a686e0eceab77b3089af45133252618e70884"}, + {file = "charset_normalizer-3.3.0-py3-none-any.whl", hash = "sha256:e46cd37076971c1040fc8c41273a8b3e2c624ce4f2be3f5dfcb7a430c1d3acc2"}, +] + +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "cloudpickle" +version = "3.0.0" +description = "Pickler class to extend the standard pickle.Pickler functionality" +optional = false +python-versions = ">=3.8" +files = [ + {file = "cloudpickle-3.0.0-py3-none-any.whl", hash = "sha256:246ee7d0c295602a036e86369c77fecda4ab17b506496730f2f576d9016fd9c7"}, + {file = "cloudpickle-3.0.0.tar.gz", hash = "sha256:996d9a482c6fb4f33c1a35335cf8afd065d2a56e973270364840712d9131a882"}, +] + +[[package]] +name = "codecov" +version = "2.1.13" +description = "Hosted coverage reports for GitHub, Bitbucket and Gitlab" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "codecov-2.1.13-py2.py3-none-any.whl", hash = "sha256:c2ca5e51bba9ebb43644c43d0690148a55086f7f5e6fd36170858fa4206744d5"}, + {file = "codecov-2.1.13.tar.gz", hash = "sha256:2362b685633caeaf45b9951a9b76ce359cd3581dd515b430c6c3f5dfb4d92a8c"}, +] + +[package.dependencies] +coverage = "*" +requests = ">=2.7.9" + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "comm" +version = "0.1.4" +description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "comm-0.1.4-py3-none-any.whl", hash = "sha256:6d52794cba11b36ed9860999cd10fd02d6b2eac177068fdd585e1e2f8a96e67a"}, + {file = "comm-0.1.4.tar.gz", hash = "sha256:354e40a59c9dd6db50c5cc6b4acc887d82e9603787f83b68c01a80a923984d15"}, +] + +[package.dependencies] +traitlets = ">=4" + +[package.extras] +lint = ["black (>=22.6.0)", "mdformat (>0.7)", "mdformat-gfm (>=0.3.5)", "ruff (>=0.0.156)"] +test = ["pytest"] +typing = ["mypy (>=0.990)"] + +[[package]] +name = "coverage" +version = "7.3.2" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "coverage-7.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d872145f3a3231a5f20fd48500274d7df222e291d90baa2026cc5152b7ce86bf"}, + {file = "coverage-7.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:310b3bb9c91ea66d59c53fa4989f57d2436e08f18fb2f421a1b0b6b8cc7fffda"}, + {file = "coverage-7.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f47d39359e2c3779c5331fc740cf4bce6d9d680a7b4b4ead97056a0ae07cb49a"}, + {file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa72dbaf2c2068404b9870d93436e6d23addd8bbe9295f49cbca83f6e278179c"}, + {file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:beaa5c1b4777f03fc63dfd2a6bd820f73f036bfb10e925fce067b00a340d0f3f"}, + {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:dbc1b46b92186cc8074fee9d9fbb97a9dd06c6cbbef391c2f59d80eabdf0faa6"}, + {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:315a989e861031334d7bee1f9113c8770472db2ac484e5b8c3173428360a9148"}, + {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d1bc430677773397f64a5c88cb522ea43175ff16f8bfcc89d467d974cb2274f9"}, + {file = "coverage-7.3.2-cp310-cp310-win32.whl", hash = "sha256:a889ae02f43aa45032afe364c8ae84ad3c54828c2faa44f3bfcafecb5c96b02f"}, + {file = "coverage-7.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:c0ba320de3fb8c6ec16e0be17ee1d3d69adcda99406c43c0409cb5c41788a611"}, + {file = "coverage-7.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ac8c802fa29843a72d32ec56d0ca792ad15a302b28ca6203389afe21f8fa062c"}, + {file = "coverage-7.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:89a937174104339e3a3ffcf9f446c00e3a806c28b1841c63edb2b369310fd074"}, + {file = "coverage-7.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e267e9e2b574a176ddb983399dec325a80dbe161f1a32715c780b5d14b5f583a"}, + {file = "coverage-7.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2443cbda35df0d35dcfb9bf8f3c02c57c1d6111169e3c85fc1fcc05e0c9f39a3"}, + {file = "coverage-7.3.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4175e10cc8dda0265653e8714b3174430b07c1dca8957f4966cbd6c2b1b8065a"}, + {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0cbf38419fb1a347aaf63481c00f0bdc86889d9fbf3f25109cf96c26b403fda1"}, + {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5c913b556a116b8d5f6ef834038ba983834d887d82187c8f73dec21049abd65c"}, + {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:1981f785239e4e39e6444c63a98da3a1db8e971cb9ceb50a945ba6296b43f312"}, + {file = "coverage-7.3.2-cp311-cp311-win32.whl", hash = "sha256:43668cabd5ca8258f5954f27a3aaf78757e6acf13c17604d89648ecc0cc66640"}, + {file = "coverage-7.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10c39c0452bf6e694511c901426d6b5ac005acc0f78ff265dbe36bf81f808a2"}, + {file = "coverage-7.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4cbae1051ab791debecc4a5dcc4a1ff45fc27b91b9aee165c8a27514dd160836"}, + {file = "coverage-7.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12d15ab5833a997716d76f2ac1e4b4d536814fc213c85ca72756c19e5a6b3d63"}, + {file = "coverage-7.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c7bba973ebee5e56fe9251300c00f1579652587a9f4a5ed8404b15a0471f216"}, + {file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fe494faa90ce6381770746077243231e0b83ff3f17069d748f645617cefe19d4"}, + {file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6e9589bd04d0461a417562649522575d8752904d35c12907d8c9dfeba588faf"}, + {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d51ac2a26f71da1b57f2dc81d0e108b6ab177e7d30e774db90675467c847bbdf"}, + {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:99b89d9f76070237975b315b3d5f4d6956ae354a4c92ac2388a5695516e47c84"}, + {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fa28e909776dc69efb6ed975a63691bc8172b64ff357e663a1bb06ff3c9b589a"}, + {file = "coverage-7.3.2-cp312-cp312-win32.whl", hash = "sha256:289fe43bf45a575e3ab10b26d7b6f2ddb9ee2dba447499f5401cfb5ecb8196bb"}, + {file = "coverage-7.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:7dbc3ed60e8659bc59b6b304b43ff9c3ed858da2839c78b804973f613d3e92ed"}, + {file = "coverage-7.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f94b734214ea6a36fe16e96a70d941af80ff3bfd716c141300d95ebc85339738"}, + {file = "coverage-7.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:af3d828d2c1cbae52d34bdbb22fcd94d1ce715d95f1a012354a75e5913f1bda2"}, + {file = "coverage-7.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:630b13e3036e13c7adc480ca42fa7afc2a5d938081d28e20903cf7fd687872e2"}, + {file = "coverage-7.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9eacf273e885b02a0273bb3a2170f30e2d53a6d53b72dbe02d6701b5296101c"}, + {file = "coverage-7.3.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8f17966e861ff97305e0801134e69db33b143bbfb36436efb9cfff6ec7b2fd9"}, + {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b4275802d16882cf9c8b3d057a0839acb07ee9379fa2749eca54efbce1535b82"}, + {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:72c0cfa5250f483181e677ebc97133ea1ab3eb68645e494775deb6a7f6f83901"}, + {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cb536f0dcd14149425996821a168f6e269d7dcd2c273a8bff8201e79f5104e76"}, + {file = "coverage-7.3.2-cp38-cp38-win32.whl", hash = "sha256:307adb8bd3abe389a471e649038a71b4eb13bfd6b7dd9a129fa856f5c695cf92"}, + {file = "coverage-7.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:88ed2c30a49ea81ea3b7f172e0269c182a44c236eb394718f976239892c0a27a"}, + {file = 
"coverage-7.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b631c92dfe601adf8f5ebc7fc13ced6bb6e9609b19d9a8cd59fa47c4186ad1ce"}, + {file = "coverage-7.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d3d9df4051c4a7d13036524b66ecf7a7537d14c18a384043f30a303b146164e9"}, + {file = "coverage-7.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f7363d3b6a1119ef05015959ca24a9afc0ea8a02c687fe7e2d557705375c01f"}, + {file = "coverage-7.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f11cc3c967a09d3695d2a6f03fb3e6236622b93be7a4b5dc09166a861be6d25"}, + {file = "coverage-7.3.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:149de1d2401ae4655c436a3dced6dd153f4c3309f599c3d4bd97ab172eaf02d9"}, + {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3a4006916aa6fee7cd38db3bfc95aa9c54ebb4ffbfc47c677c8bba949ceba0a6"}, + {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9028a3871280110d6e1aa2df1afd5ef003bab5fb1ef421d6dc748ae1c8ef2ebc"}, + {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9f805d62aec8eb92bab5b61c0f07329275b6f41c97d80e847b03eb894f38d083"}, + {file = "coverage-7.3.2-cp39-cp39-win32.whl", hash = "sha256:d1c88ec1a7ff4ebca0219f5b1ef863451d828cccf889c173e1253aa84b1e07ce"}, + {file = "coverage-7.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b4767da59464bb593c07afceaddea61b154136300881844768037fd5e859353f"}, + {file = "coverage-7.3.2-pp38.pp39.pp310-none-any.whl", hash = "sha256:ae97af89f0fbf373400970c0a21eef5aa941ffeed90aee43650b81f7d7f47637"}, + {file = "coverage-7.3.2.tar.gz", hash = "sha256:be32ad29341b0170e795ca590e1c07e81fc061cb5b10c74ce7203491484404ef"}, +] + +[package.dependencies] +tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} + +[package.extras] +toml = ["tomli"] + +[[package]] +name = "dask" +version = "2023.10.0" +description = "Parallel PyData with Task Scheduling" +optional = false +python-versions = ">=3.9" +files = [ + {file = "dask-2023.10.0-py3-none-any.whl", hash = "sha256:cb133919ff6f1fc021efe1eef24df0e4acecf33a7708e7b04d2dea6b45e166bb"}, + {file = "dask-2023.10.0.tar.gz", hash = "sha256:3fdfdbdb5f9f3a556487bf37142e5a730dab2f2c8eca0b6c79d11199c30220e3"}, +] + +[package.dependencies] +click = ">=8.0" +cloudpickle = ">=1.5.0" +fsspec = ">=2021.09.0" +importlib-metadata = ">=4.13.0" +numpy = {version = ">=1.21", optional = true, markers = "extra == \"array\""} +packaging = ">=20.0" +pandas = {version = ">=1.3", optional = true, markers = "extra == \"dataframe\""} +partd = ">=1.2.0" +pyyaml = ">=5.3.1" +toolz = ">=0.10.0" + +[package.extras] +array = ["numpy (>=1.21)"] +complete = ["dask[array,dataframe,diagnostics,distributed]", "lz4 (>=4.3.2)", "pyarrow (>=7.0)"] +dataframe = ["dask[array]", "pandas (>=1.3)"] +diagnostics = ["bokeh (>=2.4.2)", "jinja2 (>=2.10.3)"] +distributed = ["distributed (==2023.10.0)"] +test = ["pandas[test]", "pre-commit", "pytest", "pytest-cov", "pytest-rerunfailures", "pytest-timeout", "pytest-xdist"] + +[[package]] +name = "debugpy" +version = "1.8.0" +description = "An implementation of the Debug Adapter Protocol for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "debugpy-1.8.0-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:7fb95ca78f7ac43393cd0e0f2b6deda438ec7c5e47fa5d38553340897d2fbdfb"}, + {file = 
"debugpy-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef9ab7df0b9a42ed9c878afd3eaaff471fce3fa73df96022e1f5c9f8f8c87ada"}, + {file = "debugpy-1.8.0-cp310-cp310-win32.whl", hash = "sha256:a8b7a2fd27cd9f3553ac112f356ad4ca93338feadd8910277aff71ab24d8775f"}, + {file = "debugpy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:5d9de202f5d42e62f932507ee8b21e30d49aae7e46d5b1dd5c908db1d7068637"}, + {file = "debugpy-1.8.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:ef54404365fae8d45cf450d0544ee40cefbcb9cb85ea7afe89a963c27028261e"}, + {file = "debugpy-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60009b132c91951354f54363f8ebdf7457aeb150e84abba5ae251b8e9f29a8a6"}, + {file = "debugpy-1.8.0-cp311-cp311-win32.whl", hash = "sha256:8cd0197141eb9e8a4566794550cfdcdb8b3db0818bdf8c49a8e8f8053e56e38b"}, + {file = "debugpy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:a64093656c4c64dc6a438e11d59369875d200bd5abb8f9b26c1f5f723622e153"}, + {file = "debugpy-1.8.0-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:b05a6b503ed520ad58c8dc682749113d2fd9f41ffd45daec16e558ca884008cd"}, + {file = "debugpy-1.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c6fb41c98ec51dd010d7ed650accfd07a87fe5e93eca9d5f584d0578f28f35f"}, + {file = "debugpy-1.8.0-cp38-cp38-win32.whl", hash = "sha256:46ab6780159eeabb43c1495d9c84cf85d62975e48b6ec21ee10c95767c0590aa"}, + {file = "debugpy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:bdc5ef99d14b9c0fcb35351b4fbfc06ac0ee576aeab6b2511702e5a648a2e595"}, + {file = "debugpy-1.8.0-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:61eab4a4c8b6125d41a34bad4e5fe3d2cc145caecd63c3fe953be4cc53e65bf8"}, + {file = "debugpy-1.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:125b9a637e013f9faac0a3d6a82bd17c8b5d2c875fb6b7e2772c5aba6d082332"}, + {file = "debugpy-1.8.0-cp39-cp39-win32.whl", hash = "sha256:57161629133113c97b387382045649a2b985a348f0c9366e22217c87b68b73c6"}, + {file = "debugpy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:e3412f9faa9ade82aa64a50b602544efcba848c91384e9f93497a458767e6926"}, + {file = "debugpy-1.8.0-py2.py3-none-any.whl", hash = "sha256:9c9b0ac1ce2a42888199df1a1906e45e6f3c9555497643a85e0bf2406e3ffbc4"}, + {file = "debugpy-1.8.0.zip", hash = "sha256:12af2c55b419521e33d5fb21bd022df0b5eb267c3e178f1d374a63a2a6bdccd0"}, +] + +[[package]] +name = "decorator" +version = "5.1.1" +description = "Decorators for Humans" +optional = false +python-versions = ">=3.5" +files = [ + {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, + {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, +] + +[[package]] +name = "defusedxml" +version = "0.7.1" +description = "XML bomb protection for Python stdlib modules" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, + {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, +] + +[[package]] +name = "dill" +version = "0.3.7" +description = "serialize all of Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "dill-0.3.7-py3-none-any.whl", hash = "sha256:76b122c08ef4ce2eedcd4d1abd8e641114bfc6c2867f49f3c41facf65bf19f5e"}, 
+ {file = "dill-0.3.7.tar.gz", hash = "sha256:cc1c8b182eb3013e24bd475ff2e9295af86c1a38eb1aff128dac8962a9ce3c03"}, +] + +[package.extras] +graph = ["objgraph (>=1.7.2)"] + +[[package]] +name = "distlib" +version = "0.3.7" +description = "Distribution utilities" +optional = false +python-versions = "*" +files = [ + {file = "distlib-0.3.7-py2.py3-none-any.whl", hash = "sha256:2e24928bc811348f0feb63014e97aaae3037f2cf48712d51ae61df7fd6075057"}, + {file = "distlib-0.3.7.tar.gz", hash = "sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8"}, +] + +[[package]] +name = "docutils" +version = "0.20.1" +description = "Docutils -- Python Documentation Utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "docutils-0.20.1-py3-none-any.whl", hash = "sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6"}, + {file = "docutils-0.20.1.tar.gz", hash = "sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.1.3" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.1.3-py3-none-any.whl", hash = "sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3"}, + {file = "exceptiongroup-1.1.3.tar.gz", hash = "sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "executing" +version = "2.0.0" +description = "Get the currently executing AST node of a frame, and other information" +optional = false +python-versions = "*" +files = [ + {file = "executing-2.0.0-py2.py3-none-any.whl", hash = "sha256:06df6183df67389625f4e763921c6cf978944721abf3e714000200aab95b0657"}, + {file = "executing-2.0.0.tar.gz", hash = "sha256:0ff053696fdeef426cda5bd18eacd94f82c91f49823a2e9090124212ceea9b08"}, +] + +[package.extras] +tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"] + +[[package]] +name = "fastjsonschema" +version = "2.18.1" +description = "Fastest Python implementation of JSON schema" +optional = false +python-versions = "*" +files = [ + {file = "fastjsonschema-2.18.1-py3-none-any.whl", hash = "sha256:aec6a19e9f66e9810ab371cc913ad5f4e9e479b63a7072a2cd060a9369e329a8"}, + {file = "fastjsonschema-2.18.1.tar.gz", hash = "sha256:06dc8680d937628e993fa0cd278f196d20449a1adc087640710846b324d422ea"}, +] + +[package.extras] +devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benchmark", "pytest-cache", "validictory"] + +[[package]] +name = "filelock" +version = "3.12.4" +description = "A platform independent file lock." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "filelock-3.12.4-py3-none-any.whl", hash = "sha256:08c21d87ded6e2b9da6728c3dff51baf1dcecf973b768ef35bcbc3447edb9ad4"}, + {file = "filelock-3.12.4.tar.gz", hash = "sha256:2e6f249f1f3654291606e046b09f1fd5eac39b360664c27f5aad072012f8bcbd"}, +] + +[package.extras] +docs = ["furo (>=2023.7.26)", "sphinx (>=7.1.2)", "sphinx-autodoc-typehints (>=1.24)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3)", "diff-cover (>=7.7)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)", "pytest-timeout (>=2.1)"] +typing = ["typing-extensions (>=4.7.1)"] + +[[package]] +name = "flake8" +version = "6.1.0" +description = "the modular source code checker: pep8 pyflakes and co" +optional = false +python-versions = ">=3.8.1" +files = [ + {file = "flake8-6.1.0-py2.py3-none-any.whl", hash = "sha256:ffdfce58ea94c6580c77888a86506937f9a1a227dfcd15f245d694ae20a6b6e5"}, + {file = "flake8-6.1.0.tar.gz", hash = "sha256:d5b3857f07c030bdb5bf41c7f53799571d75c4491748a3adcd47de929e34cd23"}, +] + +[package.dependencies] +mccabe = ">=0.7.0,<0.8.0" +pycodestyle = ">=2.11.0,<2.12.0" +pyflakes = ">=3.1.0,<3.2.0" + +[[package]] +name = "fsspec" +version = "2023.9.2" +description = "File-system specification" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fsspec-2023.9.2-py3-none-any.whl", hash = "sha256:603dbc52c75b84da501b9b2ec8c11e1f61c25984c4a0dda1f129ef391fbfc9b4"}, + {file = "fsspec-2023.9.2.tar.gz", hash = "sha256:80bfb8c70cc27b2178cc62a935ecf242fc6e8c3fb801f9c571fc01b1e715ba7d"}, +] + +[package.extras] +abfs = ["adlfs"] +adl = ["adlfs"] +arrow = ["pyarrow (>=1)"] +dask = ["dask", "distributed"] +devel = ["pytest", "pytest-cov"] +dropbox = ["dropbox", "dropboxdrivefs", "requests"] +full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "dask", "distributed", "dropbox", "dropboxdrivefs", "fusepy", "gcsfs", "libarchive-c", "ocifs", "panel", "paramiko", "pyarrow (>=1)", "pygit2", "requests", "s3fs", "smbprotocol", "tqdm"] +fuse = ["fusepy"] +gcs = ["gcsfs"] +git = ["pygit2"] +github = ["requests"] +gs = ["gcsfs"] +gui = ["panel"] +hdfs = ["pyarrow (>=1)"] +http = ["aiohttp (!=4.0.0a0,!=4.0.0a1)", "requests"] +libarchive = ["libarchive-c"] +oci = ["ocifs"] +s3 = ["s3fs"] +sftp = ["paramiko"] +smb = ["smbprotocol"] +ssh = ["paramiko"] +tqdm = ["tqdm"] + +[[package]] +name = "greenlet" +version = "3.0.0" +description = "Lightweight in-process concurrent programming" +optional = false +python-versions = ">=3.7" +files = [ + {file = "greenlet-3.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e09dea87cc91aea5500262993cbd484b41edf8af74f976719dd83fe724644cd6"}, + {file = "greenlet-3.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f47932c434a3c8d3c86d865443fadc1fbf574e9b11d6650b656e602b1797908a"}, + {file = "greenlet-3.0.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bdfaeecf8cc705d35d8e6de324bf58427d7eafb55f67050d8f28053a3d57118c"}, + {file = "greenlet-3.0.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6a68d670c8f89ff65c82b936275369e532772eebc027c3be68c6b87ad05ca695"}, + {file = "greenlet-3.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38ad562a104cd41e9d4644f46ea37167b93190c6d5e4048fcc4b80d34ecb278f"}, + {file = "greenlet-3.0.0-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:02a807b2a58d5cdebb07050efe3d7deaf915468d112dfcf5e426d0564aa3aa4a"}, + {file = 
"greenlet-3.0.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b1660a15a446206c8545edc292ab5c48b91ff732f91b3d3b30d9a915d5ec4779"}, + {file = "greenlet-3.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:813720bd57e193391dfe26f4871186cf460848b83df7e23e6bef698a7624b4c9"}, + {file = "greenlet-3.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:aa15a2ec737cb609ed48902b45c5e4ff6044feb5dcdfcf6fa8482379190330d7"}, + {file = "greenlet-3.0.0-cp310-universal2-macosx_11_0_x86_64.whl", hash = "sha256:7709fd7bb02b31908dc8fd35bfd0a29fc24681d5cc9ac1d64ad07f8d2b7db62f"}, + {file = "greenlet-3.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:211ef8d174601b80e01436f4e6905aca341b15a566f35a10dd8d1e93f5dbb3b7"}, + {file = "greenlet-3.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6512592cc49b2c6d9b19fbaa0312124cd4c4c8a90d28473f86f92685cc5fef8e"}, + {file = "greenlet-3.0.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:871b0a8835f9e9d461b7fdaa1b57e3492dd45398e87324c047469ce2fc9f516c"}, + {file = "greenlet-3.0.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b505fcfc26f4148551826a96f7317e02c400665fa0883fe505d4fcaab1dabfdd"}, + {file = "greenlet-3.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:123910c58234a8d40eaab595bc56a5ae49bdd90122dde5bdc012c20595a94c14"}, + {file = "greenlet-3.0.0-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:96d9ea57292f636ec851a9bb961a5cc0f9976900e16e5d5647f19aa36ba6366b"}, + {file = "greenlet-3.0.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0b72b802496cccbd9b31acea72b6f87e7771ccfd7f7927437d592e5c92ed703c"}, + {file = "greenlet-3.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:527cd90ba3d8d7ae7dceb06fda619895768a46a1b4e423bdb24c1969823b8362"}, + {file = "greenlet-3.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:37f60b3a42d8b5499be910d1267b24355c495064f271cfe74bf28b17b099133c"}, + {file = "greenlet-3.0.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1482fba7fbed96ea7842b5a7fc11d61727e8be75a077e603e8ab49d24e234383"}, + {file = "greenlet-3.0.0-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:be557119bf467d37a8099d91fbf11b2de5eb1fd5fc5b91598407574848dc910f"}, + {file = "greenlet-3.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:73b2f1922a39d5d59cc0e597987300df3396b148a9bd10b76a058a2f2772fc04"}, + {file = "greenlet-3.0.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1e22c22f7826096ad503e9bb681b05b8c1f5a8138469b255eb91f26a76634f2"}, + {file = "greenlet-3.0.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1d363666acc21d2c204dd8705c0e0457d7b2ee7a76cb16ffc099d6799744ac99"}, + {file = "greenlet-3.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:334ef6ed8337bd0b58bb0ae4f7f2dcc84c9f116e474bb4ec250a8bb9bd797a66"}, + {file = "greenlet-3.0.0-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6672fdde0fd1a60b44fb1751a7779c6db487e42b0cc65e7caa6aa686874e79fb"}, + {file = "greenlet-3.0.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:952256c2bc5b4ee8df8dfc54fc4de330970bf5d79253c863fb5e6761f00dda35"}, + {file = "greenlet-3.0.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:269d06fa0f9624455ce08ae0179430eea61085e3cf6457f05982b37fd2cefe17"}, + {file = "greenlet-3.0.0-cp312-cp312-win_amd64.whl", hash = 
"sha256:9adbd8ecf097e34ada8efde9b6fec4dd2a903b1e98037adf72d12993a1c80b51"}, + {file = "greenlet-3.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6b5ce7f40f0e2f8b88c28e6691ca6806814157ff05e794cdd161be928550f4c"}, + {file = "greenlet-3.0.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ecf94aa539e97a8411b5ea52fc6ccd8371be9550c4041011a091eb8b3ca1d810"}, + {file = "greenlet-3.0.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80dcd3c938cbcac986c5c92779db8e8ce51a89a849c135172c88ecbdc8c056b7"}, + {file = "greenlet-3.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e52a712c38e5fb4fd68e00dc3caf00b60cb65634d50e32281a9d6431b33b4af1"}, + {file = "greenlet-3.0.0-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d5539f6da3418c3dc002739cb2bb8d169056aa66e0c83f6bacae0cd3ac26b423"}, + {file = "greenlet-3.0.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:343675e0da2f3c69d3fb1e894ba0a1acf58f481f3b9372ce1eb465ef93cf6fed"}, + {file = "greenlet-3.0.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:abe1ef3d780de56defd0c77c5ba95e152f4e4c4e12d7e11dd8447d338b85a625"}, + {file = "greenlet-3.0.0-cp37-cp37m-win32.whl", hash = "sha256:e693e759e172fa1c2c90d35dea4acbdd1d609b6936115d3739148d5e4cd11947"}, + {file = "greenlet-3.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:bdd696947cd695924aecb3870660b7545a19851f93b9d327ef8236bfc49be705"}, + {file = "greenlet-3.0.0-cp37-universal2-macosx_11_0_x86_64.whl", hash = "sha256:cc3e2679ea13b4de79bdc44b25a0c4fcd5e94e21b8f290791744ac42d34a0353"}, + {file = "greenlet-3.0.0-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:63acdc34c9cde42a6534518e32ce55c30f932b473c62c235a466469a710bfbf9"}, + {file = "greenlet-3.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a1a6244ff96343e9994e37e5b4839f09a0207d35ef6134dce5c20d260d0302c"}, + {file = "greenlet-3.0.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b822fab253ac0f330ee807e7485769e3ac85d5eef827ca224feaaefa462dc0d0"}, + {file = "greenlet-3.0.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8060b32d8586e912a7b7dac2d15b28dbbd63a174ab32f5bc6d107a1c4143f40b"}, + {file = "greenlet-3.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:621fcb346141ae08cb95424ebfc5b014361621b8132c48e538e34c3c93ac7365"}, + {file = "greenlet-3.0.0-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6bb36985f606a7c49916eff74ab99399cdfd09241c375d5a820bb855dfb4af9f"}, + {file = "greenlet-3.0.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:10b5582744abd9858947d163843d323d0b67be9432db50f8bf83031032bc218d"}, + {file = "greenlet-3.0.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f351479a6914fd81a55c8e68963609f792d9b067fb8a60a042c585a621e0de4f"}, + {file = "greenlet-3.0.0-cp38-cp38-win32.whl", hash = "sha256:9de687479faec7db5b198cc365bc34addd256b0028956501f4d4d5e9ca2e240a"}, + {file = "greenlet-3.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:3fd2b18432e7298fcbec3d39e1a0aa91ae9ea1c93356ec089421fabc3651572b"}, + {file = "greenlet-3.0.0-cp38-universal2-macosx_11_0_x86_64.whl", hash = "sha256:3c0d36f5adc6e6100aedbc976d7428a9f7194ea79911aa4bf471f44ee13a9464"}, + {file = "greenlet-3.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:4cd83fb8d8e17633ad534d9ac93719ef8937568d730ef07ac3a98cb520fd93e4"}, + {file = 
"greenlet-3.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a5b2d4cdaf1c71057ff823a19d850ed5c6c2d3686cb71f73ae4d6382aaa7a06"}, + {file = "greenlet-3.0.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e7dcdfad252f2ca83c685b0fa9fba00e4d8f243b73839229d56ee3d9d219314"}, + {file = "greenlet-3.0.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c94e4e924d09b5a3e37b853fe5924a95eac058cb6f6fb437ebb588b7eda79870"}, + {file = "greenlet-3.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad6fb737e46b8bd63156b8f59ba6cdef46fe2b7db0c5804388a2d0519b8ddb99"}, + {file = "greenlet-3.0.0-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d55db1db455c59b46f794346efce896e754b8942817f46a1bada2d29446e305a"}, + {file = "greenlet-3.0.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:56867a3b3cf26dc8a0beecdb4459c59f4c47cdd5424618c08515f682e1d46692"}, + {file = "greenlet-3.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9a812224a5fb17a538207e8cf8e86f517df2080c8ee0f8c1ed2bdaccd18f38f4"}, + {file = "greenlet-3.0.0-cp39-cp39-win32.whl", hash = "sha256:0d3f83ffb18dc57243e0151331e3c383b05e5b6c5029ac29f754745c800f8ed9"}, + {file = "greenlet-3.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:831d6f35037cf18ca5e80a737a27d822d87cd922521d18ed3dbc8a6967be50ce"}, + {file = "greenlet-3.0.0-cp39-universal2-macosx_11_0_x86_64.whl", hash = "sha256:a048293392d4e058298710a54dfaefcefdf49d287cd33fb1f7d63d55426e4355"}, + {file = "greenlet-3.0.0.tar.gz", hash = "sha256:19834e3f91f485442adc1ee440171ec5d9a4840a1f7bd5ed97833544719ce10b"}, +] + +[package.extras] +docs = ["Sphinx"] +test = ["objgraph", "psutil"] + +[[package]] +name = "identify" +version = "2.5.30" +description = "File identification library for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "identify-2.5.30-py2.py3-none-any.whl", hash = "sha256:afe67f26ae29bab007ec21b03d4114f41316ab9dd15aa8736a167481e108da54"}, + {file = "identify-2.5.30.tar.gz", hash = "sha256:f302a4256a15c849b91cfcdcec052a8ce914634b2f77ae87dad29cd749f2d88d"}, +] + +[package.extras] +license = ["ukkonen"] + +[[package]] +name = "idna" +version = "3.4" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, + {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, +] + +[[package]] +name = "imagesize" +version = "1.4.1" +description = "Getting image size from png/jpeg/jpeg2000/gif file" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, + {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, +] + +[[package]] +name = "importlib-metadata" +version = "6.8.0" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_metadata-6.8.0-py3-none-any.whl", hash = "sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb"}, + {file = "importlib_metadata-6.8.0.tar.gz", hash = "sha256:dbace7892d8c0c4ac1ad096662232f831d4e64f4c4545bd53016a3e9d4654743"}, +] + +[package.dependencies] +zipp = 
">=0.5" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +perf = ["ipython"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "ipykernel" +version = "6.25.2" +description = "IPython Kernel for Jupyter" +optional = false +python-versions = ">=3.8" +files = [ + {file = "ipykernel-6.25.2-py3-none-any.whl", hash = "sha256:2e2ee359baba19f10251b99415bb39de1e97d04e1fab385646f24f0596510b77"}, + {file = "ipykernel-6.25.2.tar.gz", hash = "sha256:f468ddd1f17acb48c8ce67fcfa49ba6d46d4f9ac0438c1f441be7c3d1372230b"}, +] + +[package.dependencies] +appnope = {version = "*", markers = "platform_system == \"Darwin\""} +comm = ">=0.1.1" +debugpy = ">=1.6.5" +ipython = ">=7.23.1" +jupyter-client = ">=6.1.12" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +matplotlib-inline = ">=0.1" +nest-asyncio = "*" +packaging = "*" +psutil = "*" +pyzmq = ">=20" +tornado = ">=6.1" +traitlets = ">=5.4.0" + +[package.extras] +cov = ["coverage[toml]", "curio", "matplotlib", "pytest-cov", "trio"] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "trio"] +pyqt5 = ["pyqt5"] +pyside6 = ["pyside6"] +test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio", "pytest-cov", "pytest-timeout"] + +[[package]] +name = "ipython" +version = "8.16.1" +description = "IPython: Productive Interactive Computing" +optional = false +python-versions = ">=3.9" +files = [ + {file = "ipython-8.16.1-py3-none-any.whl", hash = "sha256:0852469d4d579d9cd613c220af7bf0c9cc251813e12be647cb9d463939db9b1e"}, + {file = "ipython-8.16.1.tar.gz", hash = "sha256:ad52f58fca8f9f848e256c629eff888efc0528c12fe0f8ec14f33205f23ef938"}, +] + +[package.dependencies] +appnope = {version = "*", markers = "sys_platform == \"darwin\""} +backcall = "*" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +decorator = "*" +exceptiongroup = {version = "*", markers = "python_version < \"3.11\""} +jedi = ">=0.16" +matplotlib-inline = "*" +pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""} +pickleshare = "*" +prompt-toolkit = ">=3.0.30,<3.0.37 || >3.0.37,<3.1.0" +pygments = ">=2.4.0" +stack-data = "*" +traitlets = ">=5" +typing-extensions = {version = "*", markers = "python_version < \"3.10\""} + +[package.extras] +all = ["black", "curio", "docrepr", "exceptiongroup", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.21)", "pandas", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"] +black = ["black"] +doc = ["docrepr", "exceptiongroup", "ipykernel", "matplotlib", "pytest (<7)", "pytest (<7.1)", 
"pytest-asyncio", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"] +kernel = ["ipykernel"] +nbconvert = ["nbconvert"] +nbformat = ["nbformat"] +notebook = ["ipywidgets", "notebook"] +parallel = ["ipyparallel"] +qtconsole = ["qtconsole"] +test = ["pytest (<7.1)", "pytest-asyncio", "testpath"] +test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.21)", "pandas", "pytest (<7.1)", "pytest-asyncio", "testpath", "trio"] + +[[package]] +name = "isort" +version = "5.12.0" +description = "A Python utility / library to sort Python imports." +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "isort-5.12.0-py3-none-any.whl", hash = "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6"}, + {file = "isort-5.12.0.tar.gz", hash = "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504"}, +] + +[package.extras] +colors = ["colorama (>=0.4.3)"] +pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib"] +plugins = ["setuptools"] +requirements-deprecated-finder = ["pip-api", "pipreqs"] + +[[package]] +name = "jedi" +version = "0.19.1" +description = "An autocompletion tool for Python that can be used for text editors." +optional = false +python-versions = ">=3.6" +files = [ + {file = "jedi-0.19.1-py2.py3-none-any.whl", hash = "sha256:e983c654fe5c02867aef4cdfce5a2fbb4a50adc0af145f70504238f18ef5e7e0"}, + {file = "jedi-0.19.1.tar.gz", hash = "sha256:cf0496f3651bc65d7174ac1b7d043eff454892c708a87d1b683e57b569927ffd"}, +] + +[package.dependencies] +parso = ">=0.8.3,<0.9.0" + +[package.extras] +docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] +qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] +testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] + +[[package]] +name = "jinja2" +version = "3.1.2" +description = "A very fast and expressive template engine." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, + {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jsonschema" +version = "4.19.1" +description = "An implementation of JSON Schema validation for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jsonschema-4.19.1-py3-none-any.whl", hash = "sha256:cd5f1f9ed9444e554b38ba003af06c0a8c2868131e56bfbef0550fb450c0330e"}, + {file = "jsonschema-4.19.1.tar.gz", hash = "sha256:ec84cc37cfa703ef7cd4928db24f9cb31428a5d0fa77747b8b51a847458e0bbf"}, +] + +[package.dependencies] +attrs = ">=22.2.0" +jsonschema-specifications = ">=2023.03.6" +referencing = ">=0.28.4" +rpds-py = ">=0.7.1" + +[package.extras] +format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] +format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] + +[[package]] +name = "jsonschema-specifications" +version = "2023.7.1" +description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jsonschema_specifications-2023.7.1-py3-none-any.whl", hash = "sha256:05adf340b659828a004220a9613be00fa3f223f2b82002e273dee62fd50524b1"}, + {file = "jsonschema_specifications-2023.7.1.tar.gz", hash = "sha256:c91a50404e88a1f6ba40636778e2ee08f6e24c5613fe4c53ac24578a5a7f72bb"}, +] + +[package.dependencies] +referencing = ">=0.28.0" + +[[package]] +name = "jupyter-client" +version = "8.4.0" +description = "Jupyter protocol implementation and client libraries" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyter_client-8.4.0-py3-none-any.whl", hash = "sha256:6a2a950ec23a8f62f9e4c66acec7f0ea6c7d1f80ba0992e747b10c56ce2e6dbe"}, + {file = "jupyter_client-8.4.0.tar.gz", hash = "sha256:dc1b857d5d7d76ac101766c6e9b646bf18742721126e72e5d484c75a993cada2"}, +] + +[package.dependencies] +importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +python-dateutil = ">=2.8.2" +pyzmq = ">=23.0" +tornado = ">=6.2" +traitlets = ">=5.3" + +[package.extras] +docs = ["ipykernel", "myst-parser", "pydata-sphinx-theme", "sphinx (>=4)", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] +test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pytest", "pytest-cov", "pytest-jupyter[client] (>=0.4.1)", "pytest-timeout"] + +[[package]] +name = "jupyter-core" +version = "5.4.0" +description = "Jupyter core package. A base package on which Jupyter projects rely." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyter_core-5.4.0-py3-none-any.whl", hash = "sha256:66e252f675ac04dcf2feb6ed4afb3cd7f68cf92f483607522dc251f32d471571"}, + {file = "jupyter_core-5.4.0.tar.gz", hash = "sha256:e4b98344bb94ee2e3e6c4519a97d001656009f9cb2b7f2baf15b3c205770011d"}, +] + +[package.dependencies] +platformdirs = ">=2.5" +pywin32 = {version = ">=300", markers = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\""} +traitlets = ">=5.3" + +[package.extras] +docs = ["myst-parser", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "traitlets"] +test = ["ipykernel", "pre-commit", "pytest", "pytest-cov", "pytest-timeout"] + +[[package]] +name = "jupyterlab-pygments" +version = "0.2.2" +description = "Pygments theme using JupyterLab CSS variables" +optional = false +python-versions = ">=3.7" +files = [ + {file = "jupyterlab_pygments-0.2.2-py2.py3-none-any.whl", hash = "sha256:2405800db07c9f770863bcf8049a529c3dd4d3e28536638bd7c1c01d2748309f"}, + {file = "jupyterlab_pygments-0.2.2.tar.gz", hash = "sha256:7405d7fde60819d905a9fa8ce89e4cd830e318cdad22a0030f7a901da705585d"}, +] + +[[package]] +name = "jupytext" +version = "1.15.2" +description = "Jupyter notebooks as Markdown documents, Julia, Python or R scripts" +optional = false +python-versions = "~=3.6" +files = [ + {file = "jupytext-1.15.2-py3-none-any.whl", hash = "sha256:ef2a1a3eb8f63d84a3b3772014bdfbe238e4e12a30c4309b8c89e0a54adeb7d1"}, + {file = "jupytext-1.15.2.tar.gz", hash = "sha256:c9976e24d834e991906c1de55af4b6d512d764f6372aabae45fc1ea72b589173"}, +] + +[package.dependencies] +markdown-it-py = ">=1.0.0" +mdit-py-plugins = "*" +nbformat = "*" +pyyaml = "*" +toml = "*" + +[package.extras] +rst2md = ["sphinx-gallery (>=0.7.0,<0.8.0)"] +toml = ["toml"] + +[[package]] +name = "locket" +version = "1.0.0" +description = "File-based locks for Python on Linux and Windows" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "locket-1.0.0-py2.py3-none-any.whl", hash = "sha256:b6c819a722f7b6bd955b80781788e4a66a55628b858d347536b7e81325a3a5e3"}, + {file = "locket-1.0.0.tar.gz", hash = "sha256:5c0d4c052a8bbbf750e056a8e65ccd309086f4f0f18a2eac306a8dfa4112a632"}, +] + +[[package]] +name = "markdown-it-py" +version = "3.0.0" +description = "Python port of markdown-it. Markdown parsing, done right!" +optional = false +python-versions = ">=3.8" +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +code-style = ["pre-commit (>=3.0,<4.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + +[[package]] +name = "markupsafe" +version = "2.1.3" +description = "Safely add untrusted strings to HTML/XML markup." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-win32.whl", hash = "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, + {file = 
"MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-win32.whl", hash = "sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-win_amd64.whl", hash = "sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e"}, + {file = 
"MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-win32.whl", hash = "sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-win32.whl", hash = "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba"}, + {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, +] + +[[package]] +name = "matplotlib-inline" +version = "0.1.6" +description = "Inline Matplotlib backend for Jupyter" +optional = false +python-versions = ">=3.5" +files = [ + {file = "matplotlib-inline-0.1.6.tar.gz", hash = "sha256:f887e5f10ba98e8d2b150ddcf4702c1e5f8b3a20005eb0f74bfdbd360ee6f304"}, + {file = "matplotlib_inline-0.1.6-py3-none-any.whl", hash = "sha256:f1f41aab5328aa5aaea9b16d083b128102f8712542f819fe7e6a420ff581b311"}, +] + +[package.dependencies] +traitlets = "*" + +[[package]] +name = "mccabe" +version = "0.7.0" +description = "McCabe checker, plugin for flake8" +optional = false +python-versions = ">=3.6" +files = [ + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, +] + +[[package]] +name = "mdit-py-plugins" +version = "0.4.0" +description = "Collection of plugins for markdown-it-py" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mdit_py_plugins-0.4.0-py3-none-any.whl", hash = 
"sha256:b51b3bb70691f57f974e257e367107857a93b36f322a9e6d44ca5bf28ec2def9"}, + {file = "mdit_py_plugins-0.4.0.tar.gz", hash = "sha256:d8ab27e9aed6c38aa716819fedfde15ca275715955f8a185a8e1cf90fb1d2c1b"}, +] + +[package.dependencies] +markdown-it-py = ">=1.0.0,<4.0.0" + +[package.extras] +code-style = ["pre-commit"] +rtd = ["myst-parser", "sphinx-book-theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + +[[package]] +name = "mistune" +version = "3.0.2" +description = "A sane and fast Markdown parser with useful plugins and renderers" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mistune-3.0.2-py3-none-any.whl", hash = "sha256:71481854c30fdbc938963d3605b72501f5c10a9320ecd412c121c163a1c7d205"}, + {file = "mistune-3.0.2.tar.gz", hash = "sha256:fc7f93ded930c92394ef2cb6f04a8aabab4117a91449e72dcc8dfa646a508be8"}, +] + +[[package]] +name = "mypy" +version = "1.6.1" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy-1.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e5012e5cc2ac628177eaac0e83d622b2dd499e28253d4107a08ecc59ede3fc2c"}, + {file = "mypy-1.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d8fbb68711905f8912e5af474ca8b78d077447d8f3918997fecbf26943ff3cbb"}, + {file = "mypy-1.6.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21a1ad938fee7d2d96ca666c77b7c494c3c5bd88dff792220e1afbebb2925b5e"}, + {file = "mypy-1.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b96ae2c1279d1065413965c607712006205a9ac541895004a1e0d4f281f2ff9f"}, + {file = "mypy-1.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:40b1844d2e8b232ed92e50a4bd11c48d2daa351f9deee6c194b83bf03e418b0c"}, + {file = "mypy-1.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:81af8adaa5e3099469e7623436881eff6b3b06db5ef75e6f5b6d4871263547e5"}, + {file = "mypy-1.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8c223fa57cb154c7eab5156856c231c3f5eace1e0bed9b32a24696b7ba3c3245"}, + {file = "mypy-1.6.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8032e00ce71c3ceb93eeba63963b864bf635a18f6c0c12da6c13c450eedb183"}, + {file = "mypy-1.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4c46b51de523817a0045b150ed11b56f9fff55f12b9edd0f3ed35b15a2809de0"}, + {file = "mypy-1.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:19f905bcfd9e167159b3d63ecd8cb5e696151c3e59a1742e79bc3bcb540c42c7"}, + {file = "mypy-1.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:82e469518d3e9a321912955cc702d418773a2fd1e91c651280a1bda10622f02f"}, + {file = "mypy-1.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d4473c22cc296425bbbce7e9429588e76e05bc7342da359d6520b6427bf76660"}, + {file = "mypy-1.6.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59a0d7d24dfb26729e0a068639a6ce3500e31d6655df8557156c51c1cb874ce7"}, + {file = "mypy-1.6.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:cfd13d47b29ed3bbaafaff7d8b21e90d827631afda134836962011acb5904b71"}, + {file = "mypy-1.6.1-cp312-cp312-win_amd64.whl", hash = 
"sha256:eb4f18589d196a4cbe5290b435d135dee96567e07c2b2d43b5c4621b6501531a"}, + {file = "mypy-1.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:41697773aa0bf53ff917aa077e2cde7aa50254f28750f9b88884acea38a16169"}, + {file = "mypy-1.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7274b0c57737bd3476d2229c6389b2ec9eefeb090bbaf77777e9d6b1b5a9d143"}, + {file = "mypy-1.6.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbaf4662e498c8c2e352da5f5bca5ab29d378895fa2d980630656178bd607c46"}, + {file = "mypy-1.6.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bb8ccb4724f7d8601938571bf3f24da0da791fe2db7be3d9e79849cb64e0ae85"}, + {file = "mypy-1.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:68351911e85145f582b5aa6cd9ad666c8958bcae897a1bfda8f4940472463c45"}, + {file = "mypy-1.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:49ae115da099dcc0922a7a895c1eec82c1518109ea5c162ed50e3b3594c71208"}, + {file = "mypy-1.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8b27958f8c76bed8edaa63da0739d76e4e9ad4ed325c814f9b3851425582a3cd"}, + {file = "mypy-1.6.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:925cd6a3b7b55dfba252b7c4561892311c5358c6b5a601847015a1ad4eb7d332"}, + {file = "mypy-1.6.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8f57e6b6927a49550da3d122f0cb983d400f843a8a82e65b3b380d3d7259468f"}, + {file = "mypy-1.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:a43ef1c8ddfdb9575691720b6352761f3f53d85f1b57d7745701041053deff30"}, + {file = "mypy-1.6.1-py3-none-any.whl", hash = "sha256:4cbe68ef919c28ea561165206a2dcb68591c50f3bcf777932323bc208d949cf1"}, + {file = "mypy-1.6.1.tar.gz", hash = "sha256:4d01c00d09a0be62a4ca3f933e315455bde83f37f892ba4b08ce92f3cf44bcc1"}, +] + +[package.dependencies] +mypy-extensions = ">=1.0.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = ">=4.1.0" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +install-types = ["pip"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." 
+optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "myst-parser" +version = "2.0.0" +description = "An extended [CommonMark](https://spec.commonmark.org/) compliant parser," +optional = false +python-versions = ">=3.8" +files = [ + {file = "myst_parser-2.0.0-py3-none-any.whl", hash = "sha256:7c36344ae39c8e740dad7fdabf5aa6fc4897a813083c6cc9990044eb93656b14"}, + {file = "myst_parser-2.0.0.tar.gz", hash = "sha256:ea929a67a6a0b1683cdbe19b8d2e724cd7643f8aa3e7bb18dd65beac3483bead"}, +] + +[package.dependencies] +docutils = ">=0.16,<0.21" +jinja2 = "*" +markdown-it-py = ">=3.0,<4.0" +mdit-py-plugins = ">=0.4,<1.0" +pyyaml = "*" +sphinx = ">=6,<8" + +[package.extras] +code-style = ["pre-commit (>=3.0,<4.0)"] +linkify = ["linkify-it-py (>=2.0,<3.0)"] +rtd = ["ipython", "pydata-sphinx-theme (==v0.13.0rc4)", "sphinx-autodoc2 (>=0.4.2,<0.5.0)", "sphinx-book-theme (==1.0.0rc2)", "sphinx-copybutton", "sphinx-design2", "sphinx-pyscript", "sphinx-tippy (>=0.3.1)", "sphinx-togglebutton", "sphinxext-opengraph (>=0.8.2,<0.9.0)", "sphinxext-rediraffe (>=0.2.7,<0.3.0)"] +testing = ["beautifulsoup4", "coverage[toml]", "pytest (>=7,<8)", "pytest-cov", "pytest-param-files (>=0.3.4,<0.4.0)", "pytest-regressions", "sphinx-pytest"] +testing-docutils = ["pygments", "pytest (>=7,<8)", "pytest-param-files (>=0.3.4,<0.4.0)"] + +[[package]] +name = "nbclient" +version = "0.8.0" +description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "nbclient-0.8.0-py3-none-any.whl", hash = "sha256:25e861299e5303a0477568557c4045eccc7a34c17fc08e7959558707b9ebe548"}, + {file = "nbclient-0.8.0.tar.gz", hash = "sha256:f9b179cd4b2d7bca965f900a2ebf0db4a12ebff2f36a711cb66861e4ae158e55"}, +] + +[package.dependencies] +jupyter-client = ">=6.1.12" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +nbformat = ">=5.1" +traitlets = ">=5.4" + +[package.extras] +dev = ["pre-commit"] +docs = ["autodoc-traits", "mock", "moto", "myst-parser", "nbclient[test]", "sphinx (>=1.7)", "sphinx-book-theme", "sphinxcontrib-spelling"] +test = ["flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "nbconvert (>=7.0.0)", "pytest (>=7.0)", "pytest-asyncio", "pytest-cov (>=4.0)", "testpath", "xmltodict"] + +[[package]] +name = "nbconvert" +version = "7.9.2" +description = "Converting Jupyter Notebooks" +optional = false +python-versions = ">=3.8" +files = [ + {file = "nbconvert-7.9.2-py3-none-any.whl", hash = "sha256:39fe4b8bdd1b0104fdd86fc8a43a9077ba64c720bda4c6132690d917a0a154ee"}, + {file = "nbconvert-7.9.2.tar.gz", hash = "sha256:e56cc7588acc4f93e2bb5a34ec69028e4941797b2bfaf6462f18a41d1cc258c9"}, +] + +[package.dependencies] +beautifulsoup4 = "*" +bleach = "!=5.0.0" +defusedxml = "*" +importlib-metadata = {version = ">=3.6", markers = "python_version < \"3.10\""} +jinja2 = ">=3.0" +jupyter-core = ">=4.7" +jupyterlab-pygments = "*" +markupsafe = ">=2.0" +mistune = ">=2.0.3,<4" +nbclient = ">=0.5.0" +nbformat = ">=5.7" +packaging = "*" +pandocfilters = ">=1.4.1" +pygments = ">=2.4.1" +tinycss2 = "*" +traitlets = ">=5.1" + +[package.extras] +all = ["nbconvert[docs,qtpdf,serve,test,webpdf]"] +docs = ["ipykernel", "ipython", "myst-parser", "nbsphinx (>=0.2.12)", 
"pydata-sphinx-theme", "sphinx (==5.0.2)", "sphinxcontrib-spelling"] +qtpdf = ["nbconvert[qtpng]"] +qtpng = ["pyqtwebengine (>=5.15)"] +serve = ["tornado (>=6.1)"] +test = ["flaky", "ipykernel", "ipywidgets (>=7)", "pytest", "pytest-dependency"] +webpdf = ["playwright"] + +[[package]] +name = "nbformat" +version = "5.9.2" +description = "The Jupyter Notebook format" +optional = false +python-versions = ">=3.8" +files = [ + {file = "nbformat-5.9.2-py3-none-any.whl", hash = "sha256:1c5172d786a41b82bcfd0c23f9e6b6f072e8fb49c39250219e4acfff1efe89e9"}, + {file = "nbformat-5.9.2.tar.gz", hash = "sha256:5f98b5ba1997dff175e77e0c17d5c10a96eaed2cbd1de3533d1fc35d5e111192"}, +] + +[package.dependencies] +fastjsonschema = "*" +jsonschema = ">=2.6" +jupyter-core = "*" +traitlets = ">=5.1" + +[package.extras] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] +test = ["pep440", "pre-commit", "pytest", "testpath"] + +[[package]] +name = "nbqa" +version = "1.7.0" +description = "Run any standard Python code quality tool on a Jupyter Notebook" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "nbqa-1.7.0-py3-none-any.whl", hash = "sha256:42a79b0f57c3ef47b8b223dc8c265f499812f529dc5154688449e7bd05d76b7e"}, + {file = "nbqa-1.7.0.tar.gz", hash = "sha256:117112ad6d618ff13afc5ba41ca0ff21c23f084a6df7875fb04b7f2c01a136c4"}, +] + +[package.dependencies] +autopep8 = ">=1.5" +black = {version = "*", optional = true, markers = "extra == \"toolchain\""} +blacken-docs = {version = "*", optional = true, markers = "extra == \"toolchain\""} +flake8 = {version = "*", optional = true, markers = "extra == \"toolchain\""} +ipython = ">=7.8.0" +isort = {version = "*", optional = true, markers = "extra == \"toolchain\""} +jupytext = {version = "*", optional = true, markers = "extra == \"toolchain\""} +mypy = {version = "*", optional = true, markers = "extra == \"toolchain\""} +pylint = {version = "*", optional = true, markers = "extra == \"toolchain\""} +pyupgrade = {version = "*", optional = true, markers = "extra == \"toolchain\""} +ruff = {version = "*", optional = true, markers = "extra == \"toolchain\""} +tokenize-rt = ">=3.2.0" +tomli = "*" + +[package.extras] +toolchain = ["black", "blacken-docs", "flake8", "isort", "jupytext", "mypy", "pylint", "pyupgrade", "ruff"] + +[[package]] +name = "nbsphinx" +version = "0.8.12" +description = "Jupyter Notebook Tools for Sphinx" +optional = false +python-versions = ">=3.6" +files = [ + {file = "nbsphinx-0.8.12-py3-none-any.whl", hash = "sha256:c15b681c7fce287000856f91fe1edac50d29f7b0c15bbc746fbe55c8eb84750b"}, + {file = "nbsphinx-0.8.12.tar.gz", hash = "sha256:76570416cdecbeb21dbf5c3d6aa204ced6c1dd7ebef4077b5c21b8c6ece9533f"}, +] + +[package.dependencies] +docutils = "*" +jinja2 = "*" +nbconvert = "!=5.4" +nbformat = "*" +sphinx = ">=1.8" +traitlets = ">=5" + +[[package]] +name = "nbstripout" +version = "0.6.1" +description = "Strips outputs from Jupyter and IPython notebooks" +optional = false +python-versions = ">=3.6" +files = [ + {file = "nbstripout-0.6.1-py2.py3-none-any.whl", hash = "sha256:5ff6eb0debbcd656c4a64db8e082a24fabcfc753a9e8c9f6d786971e8f29e110"}, + {file = "nbstripout-0.6.1.tar.gz", hash = "sha256:9065bcdd1488b386e4f3c081ffc1d48f4513a2f8d8bf4d0d9a28208c5dafe9d3"}, +] + +[package.dependencies] +nbformat = "*" + +[[package]] +name = "nest-asyncio" +version = "1.5.8" +description = "Patch asyncio to allow nested event loops" +optional = false +python-versions = ">=3.5" +files = [ + {file = 
"nest_asyncio-1.5.8-py3-none-any.whl", hash = "sha256:accda7a339a70599cb08f9dd09a67e0c2ef8d8d6f4c07f96ab203f2ae254e48d"}, + {file = "nest_asyncio-1.5.8.tar.gz", hash = "sha256:25aa2ca0d2a5b5531956b9e273b45cf664cae2b145101d73b86b199978d48fdb"}, +] + +[[package]] +name = "nodeenv" +version = "1.8.0" +description = "Node.js virtual environment builder" +optional = false +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" +files = [ + {file = "nodeenv-1.8.0-py2.py3-none-any.whl", hash = "sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec"}, + {file = "nodeenv-1.8.0.tar.gz", hash = "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2"}, +] + +[package.dependencies] +setuptools = "*" + +[[package]] +name = "numpy" +version = "1.26.1" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = "<3.13,>=3.9" +files = [ + {file = "numpy-1.26.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:82e871307a6331b5f09efda3c22e03c095d957f04bf6bc1804f30048d0e5e7af"}, + {file = "numpy-1.26.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cdd9ec98f0063d93baeb01aad472a1a0840dee302842a2746a7a8e92968f9575"}, + {file = "numpy-1.26.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d78f269e0c4fd365fc2992c00353e4530d274ba68f15e968d8bc3c69ce5f5244"}, + {file = "numpy-1.26.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ab9163ca8aeb7fd32fe93866490654d2f7dda4e61bc6297bf72ce07fdc02f67"}, + {file = "numpy-1.26.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:78ca54b2f9daffa5f323f34cdf21e1d9779a54073f0018a3094ab907938331a2"}, + {file = "numpy-1.26.1-cp310-cp310-win32.whl", hash = "sha256:d1cfc92db6af1fd37a7bb58e55c8383b4aa1ba23d012bdbba26b4bcca45ac297"}, + {file = "numpy-1.26.1-cp310-cp310-win_amd64.whl", hash = "sha256:d2984cb6caaf05294b8466966627e80bf6c7afd273279077679cb010acb0e5ab"}, + {file = "numpy-1.26.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cd7837b2b734ca72959a1caf3309457a318c934abef7a43a14bb984e574bbb9a"}, + {file = "numpy-1.26.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1c59c046c31a43310ad0199d6299e59f57a289e22f0f36951ced1c9eac3665b9"}, + {file = "numpy-1.26.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d58e8c51a7cf43090d124d5073bc29ab2755822181fcad978b12e144e5e5a4b3"}, + {file = "numpy-1.26.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6081aed64714a18c72b168a9276095ef9155dd7888b9e74b5987808f0dd0a974"}, + {file = "numpy-1.26.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:97e5d6a9f0702c2863aaabf19f0d1b6c2628fbe476438ce0b5ce06e83085064c"}, + {file = "numpy-1.26.1-cp311-cp311-win32.whl", hash = "sha256:b9d45d1dbb9de84894cc50efece5b09939752a2d75aab3a8b0cef6f3a35ecd6b"}, + {file = "numpy-1.26.1-cp311-cp311-win_amd64.whl", hash = "sha256:3649d566e2fc067597125428db15d60eb42a4e0897fc48d28cb75dc2e0454e53"}, + {file = "numpy-1.26.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:1d1bd82d539607951cac963388534da3b7ea0e18b149a53cf883d8f699178c0f"}, + {file = "numpy-1.26.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:afd5ced4e5a96dac6725daeb5242a35494243f2239244fad10a90ce58b071d24"}, + {file = "numpy-1.26.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a03fb25610ef560a6201ff06df4f8105292ba56e7cdd196ea350d123fc32e24e"}, + {file = "numpy-1.26.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:dcfaf015b79d1f9f9c9fd0731a907407dc3e45769262d657d754c3a028586124"}, + {file = "numpy-1.26.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e509cbc488c735b43b5ffea175235cec24bbc57b227ef1acc691725beb230d1c"}, + {file = "numpy-1.26.1-cp312-cp312-win32.whl", hash = "sha256:af22f3d8e228d84d1c0c44c1fbdeb80f97a15a0abe4f080960393a00db733b66"}, + {file = "numpy-1.26.1-cp312-cp312-win_amd64.whl", hash = "sha256:9f42284ebf91bdf32fafac29d29d4c07e5e9d1af862ea73686581773ef9e73a7"}, + {file = "numpy-1.26.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bb894accfd16b867d8643fc2ba6c8617c78ba2828051e9a69511644ce86ce83e"}, + {file = "numpy-1.26.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e44ccb93f30c75dfc0c3aa3ce38f33486a75ec9abadabd4e59f114994a9c4617"}, + {file = "numpy-1.26.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9696aa2e35cc41e398a6d42d147cf326f8f9d81befcb399bc1ed7ffea339b64e"}, + {file = "numpy-1.26.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5b411040beead47a228bde3b2241100454a6abde9df139ed087bd73fc0a4908"}, + {file = "numpy-1.26.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1e11668d6f756ca5ef534b5be8653d16c5352cbb210a5c2a79ff288e937010d5"}, + {file = "numpy-1.26.1-cp39-cp39-win32.whl", hash = "sha256:d1d2c6b7dd618c41e202c59c1413ef9b2c8e8a15f5039e344af64195459e3104"}, + {file = "numpy-1.26.1-cp39-cp39-win_amd64.whl", hash = "sha256:59227c981d43425ca5e5c01094d59eb14e8772ce6975d4b2fc1e106a833d5ae2"}, + {file = "numpy-1.26.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:06934e1a22c54636a059215d6da99e23286424f316fddd979f5071093b648668"}, + {file = "numpy-1.26.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76ff661a867d9272cd2a99eed002470f46dbe0943a5ffd140f49be84f68ffc42"}, + {file = "numpy-1.26.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:6965888d65d2848e8768824ca8288db0a81263c1efccec881cb35a0d805fcd2f"}, + {file = "numpy-1.26.1.tar.gz", hash = "sha256:c8c6c72d4a9f831f328efb1312642a1cafafaa88981d9ab76368d50d07d93cbe"}, +] + +[[package]] +name = "numpydoc" +version = "1.6.0" +description = "Sphinx extension to support docstrings in Numpy format" +optional = false +python-versions = ">=3.8" +files = [ + {file = "numpydoc-1.6.0-py3-none-any.whl", hash = "sha256:b6ddaa654a52bdf967763c1e773be41f1c3ae3da39ee0de973f2680048acafaa"}, + {file = "numpydoc-1.6.0.tar.gz", hash = "sha256:ae7a5380f0a06373c3afe16ccd15bd79bc6b07f2704cbc6f1e7ecc94b4f5fc0d"}, +] + +[package.dependencies] +Jinja2 = ">=2.10" +sphinx = ">=5" +tabulate = ">=0.8.10" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} + +[package.extras] +developer = ["pre-commit (>=3.3)", "tomli"] +doc = ["matplotlib (>=3.5)", "numpy (>=1.22)", "pydata-sphinx-theme (>=0.13.3)", "sphinx (>=7)"] +test = ["matplotlib", "pytest", "pytest-cov"] + +[[package]] +name = "packaging" +version = "23.2" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, +] + +[[package]] +name = "pandas" +version = "1.5.3" +description = "Powerful data structures for data analysis, time series, and statistics" +optional = false +python-versions = ">=3.8" +files = [ + {file = 
"pandas-1.5.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3749077d86e3a2f0ed51367f30bf5b82e131cc0f14260c4d3e499186fccc4406"}, + {file = "pandas-1.5.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:972d8a45395f2a2d26733eb8d0f629b2f90bebe8e8eddbb8829b180c09639572"}, + {file = "pandas-1.5.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:50869a35cbb0f2e0cd5ec04b191e7b12ed688874bd05dd777c19b28cbea90996"}, + {file = "pandas-1.5.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3ac844a0fe00bfaeb2c9b51ab1424e5c8744f89860b138434a363b1f620f354"}, + {file = "pandas-1.5.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a0a56cef15fd1586726dace5616db75ebcfec9179a3a55e78f72c5639fa2a23"}, + {file = "pandas-1.5.3-cp310-cp310-win_amd64.whl", hash = "sha256:478ff646ca42b20376e4ed3fa2e8d7341e8a63105586efe54fa2508ee087f328"}, + {file = "pandas-1.5.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6973549c01ca91ec96199e940495219c887ea815b2083722821f1d7abfa2b4dc"}, + {file = "pandas-1.5.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c39a8da13cede5adcd3be1182883aea1c925476f4e84b2807a46e2775306305d"}, + {file = "pandas-1.5.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f76d097d12c82a535fda9dfe5e8dd4127952b45fea9b0276cb30cca5ea313fbc"}, + {file = "pandas-1.5.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e474390e60ed609cec869b0da796ad94f420bb057d86784191eefc62b65819ae"}, + {file = "pandas-1.5.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f2b952406a1588ad4cad5b3f55f520e82e902388a6d5a4a91baa8d38d23c7f6"}, + {file = "pandas-1.5.3-cp311-cp311-win_amd64.whl", hash = "sha256:bc4c368f42b551bf72fac35c5128963a171b40dce866fb066540eeaf46faa003"}, + {file = "pandas-1.5.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:14e45300521902689a81f3f41386dc86f19b8ba8dd5ac5a3c7010ef8d2932813"}, + {file = "pandas-1.5.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9842b6f4b8479e41968eced654487258ed81df7d1c9b7b870ceea24ed9459b31"}, + {file = "pandas-1.5.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:26d9c71772c7afb9d5046e6e9cf42d83dd147b5cf5bcb9d97252077118543792"}, + {file = "pandas-1.5.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fbcb19d6fceb9e946b3e23258757c7b225ba450990d9ed63ccceeb8cae609f7"}, + {file = "pandas-1.5.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:565fa34a5434d38e9d250af3c12ff931abaf88050551d9fbcdfafca50d62babf"}, + {file = "pandas-1.5.3-cp38-cp38-win32.whl", hash = "sha256:87bd9c03da1ac870a6d2c8902a0e1fd4267ca00f13bc494c9e5a9020920e1d51"}, + {file = "pandas-1.5.3-cp38-cp38-win_amd64.whl", hash = "sha256:41179ce559943d83a9b4bbacb736b04c928b095b5f25dd2b7389eda08f46f373"}, + {file = "pandas-1.5.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c74a62747864ed568f5a82a49a23a8d7fe171d0c69038b38cedf0976831296fa"}, + {file = "pandas-1.5.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c4c00e0b0597c8e4f59e8d461f797e5d70b4d025880516a8261b2817c47759ee"}, + {file = "pandas-1.5.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a50d9a4336a9621cab7b8eb3fb11adb82de58f9b91d84c2cd526576b881a0c5a"}, + {file = "pandas-1.5.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd05f7783b3274aa206a1af06f0ceed3f9b412cf665b7247eacd83be41cf7bf0"}, + {file = "pandas-1.5.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:9f69c4029613de47816b1bb30ff5ac778686688751a5e9c99ad8c7031f6508e5"}, + {file = "pandas-1.5.3-cp39-cp39-win32.whl", hash = "sha256:7cec0bee9f294e5de5bbfc14d0573f65526071029d036b753ee6507d2a21480a"}, + {file = "pandas-1.5.3-cp39-cp39-win_amd64.whl", hash = "sha256:dfd681c5dc216037e0b0a2c821f5ed99ba9f03ebcf119c7dac0e9a7b960b9ec9"}, + {file = "pandas-1.5.3.tar.gz", hash = "sha256:74a3fd7e5a7ec052f183273dc7b0acd3a863edf7520f5d3a1765c04ffdb3b0b1"}, +] + +[package.dependencies] +numpy = [ + {version = ">=1.20.3", markers = "python_version < \"3.10\""}, + {version = ">=1.21.0", markers = "python_version >= \"3.10\""}, +] +python-dateutil = ">=2.8.1" +pytz = ">=2020.1" + +[package.extras] +test = ["hypothesis (>=5.5.3)", "pytest (>=6.0)", "pytest-xdist (>=1.31)"] + +[[package]] +name = "pandocfilters" +version = "1.5.0" +description = "Utilities for writing pandoc filters in python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pandocfilters-1.5.0-py2.py3-none-any.whl", hash = "sha256:33aae3f25fd1a026079f5d27bdd52496f0e0803b3469282162bafdcbdf6ef14f"}, + {file = "pandocfilters-1.5.0.tar.gz", hash = "sha256:0b679503337d233b4339a817bfc8c50064e2eff681314376a47cb582305a7a38"}, +] + +[[package]] +name = "parso" +version = "0.8.3" +description = "A Python Parser" +optional = false +python-versions = ">=3.6" +files = [ + {file = "parso-0.8.3-py2.py3-none-any.whl", hash = "sha256:c001d4636cd3aecdaf33cbb40aebb59b094be2a74c556778ef5576c175e19e75"}, + {file = "parso-0.8.3.tar.gz", hash = "sha256:8c07be290bb59f03588915921e29e8a50002acaf2cdc5fa0e0114f91709fafa0"}, +] + +[package.extras] +qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] +testing = ["docopt", "pytest (<6.0.0)"] + +[[package]] +name = "partd" +version = "1.4.1" +description = "Appendable key-value storage" +optional = false +python-versions = ">=3.7" +files = [ + {file = "partd-1.4.1-py3-none-any.whl", hash = "sha256:27e766663d36c161e2827aa3e28541c992f0b9527d3cca047e13fb3acdb989e6"}, + {file = "partd-1.4.1.tar.gz", hash = "sha256:56c25dd49e6fea5727e731203c466c6e092f308d8f0024e199d02f6aa2167f67"}, +] + +[package.dependencies] +locket = "*" +toolz = "*" + +[package.extras] +complete = ["blosc", "numpy (>=1.9.0)", "pandas (>=0.19.0)", "pyzmq"] + +[[package]] +name = "pathspec" +version = "0.11.2" +description = "Utility library for gitignore style pattern matching of file paths." +optional = false +python-versions = ">=3.7" +files = [ + {file = "pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20"}, + {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"}, +] + +[[package]] +name = "pbr" +version = "5.11.1" +description = "Python Build Reasonableness" +optional = false +python-versions = ">=2.6" +files = [ + {file = "pbr-5.11.1-py2.py3-none-any.whl", hash = "sha256:567f09558bae2b3ab53cb3c1e2e33e726ff3338e7bae3db5dc954b3a44eef12b"}, + {file = "pbr-5.11.1.tar.gz", hash = "sha256:aefc51675b0b533d56bb5fd1c8c6c0522fe31896679882e1c4c63d5e4a0fccb3"}, +] + +[[package]] +name = "pexpect" +version = "4.8.0" +description = "Pexpect allows easy control of interactive console applications." 
+optional = false +python-versions = "*" +files = [ + {file = "pexpect-4.8.0-py2.py3-none-any.whl", hash = "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"}, + {file = "pexpect-4.8.0.tar.gz", hash = "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"}, +] + +[package.dependencies] +ptyprocess = ">=0.5" + +[[package]] +name = "pickleshare" +version = "0.7.5" +description = "Tiny 'shelve'-like database with concurrency support" +optional = false +python-versions = "*" +files = [ + {file = "pickleshare-0.7.5-py2.py3-none-any.whl", hash = "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"}, + {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, +] + +[[package]] +name = "platformdirs" +version = "3.11.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +optional = false +python-versions = ">=3.7" +files = [ + {file = "platformdirs-3.11.0-py3-none-any.whl", hash = "sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e"}, + {file = "platformdirs-3.11.0.tar.gz", hash = "sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3"}, +] + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] + +[[package]] +name = "pluggy" +version = "1.3.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, + {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "pre-commit" +version = "2.21.0" +description = "A framework for managing and maintaining multi-language pre-commit hooks." +optional = false +python-versions = ">=3.7" +files = [ + {file = "pre_commit-2.21.0-py2.py3-none-any.whl", hash = "sha256:e2f91727039fc39a92f58a588a25b87f936de6567eed4f0e673e0507edc75bad"}, + {file = "pre_commit-2.21.0.tar.gz", hash = "sha256:31ef31af7e474a8d8995027fefdfcf509b5c913ff31f2015b4ec4beb26a6f658"}, +] + +[package.dependencies] +cfgv = ">=2.0.0" +identify = ">=1.0.0" +nodeenv = ">=0.11.1" +pyyaml = ">=5.1" +virtualenv = ">=20.10.0" + +[[package]] +name = "prompt-toolkit" +version = "3.0.39" +description = "Library for building powerful interactive command lines in Python" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "prompt_toolkit-3.0.39-py3-none-any.whl", hash = "sha256:9dffbe1d8acf91e3de75f3b544e4842382fc06c6babe903ac9acb74dc6e08d88"}, + {file = "prompt_toolkit-3.0.39.tar.gz", hash = "sha256:04505ade687dc26dc4284b1ad19a83be2f2afe83e7a828ace0c72f3a1df72aac"}, +] + +[package.dependencies] +wcwidth = "*" + +[[package]] +name = "psutil" +version = "5.9.6" +description = "Cross-platform lib for process and system monitoring in Python." 
+optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "psutil-5.9.6-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:fb8a697f11b0f5994550555fcfe3e69799e5b060c8ecf9e2f75c69302cc35c0d"}, + {file = "psutil-5.9.6-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:91ecd2d9c00db9817a4b4192107cf6954addb5d9d67a969a4f436dbc9200f88c"}, + {file = "psutil-5.9.6-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:10e8c17b4f898d64b121149afb136c53ea8b68c7531155147867b7b1ac9e7e28"}, + {file = "psutil-5.9.6-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:18cd22c5db486f33998f37e2bb054cc62fd06646995285e02a51b1e08da97017"}, + {file = "psutil-5.9.6-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:ca2780f5e038379e520281e4c032dddd086906ddff9ef0d1b9dcf00710e5071c"}, + {file = "psutil-5.9.6-cp27-none-win32.whl", hash = "sha256:70cb3beb98bc3fd5ac9ac617a327af7e7f826373ee64c80efd4eb2856e5051e9"}, + {file = "psutil-5.9.6-cp27-none-win_amd64.whl", hash = "sha256:51dc3d54607c73148f63732c727856f5febec1c7c336f8f41fcbd6315cce76ac"}, + {file = "psutil-5.9.6-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:c69596f9fc2f8acd574a12d5f8b7b1ba3765a641ea5d60fb4736bf3c08a8214a"}, + {file = "psutil-5.9.6-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:92e0cc43c524834af53e9d3369245e6cc3b130e78e26100d1f63cdb0abeb3d3c"}, + {file = "psutil-5.9.6-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:748c9dd2583ed86347ed65d0035f45fa8c851e8d90354c122ab72319b5f366f4"}, + {file = "psutil-5.9.6-cp36-cp36m-win32.whl", hash = "sha256:3ebf2158c16cc69db777e3c7decb3c0f43a7af94a60d72e87b2823aebac3d602"}, + {file = "psutil-5.9.6-cp36-cp36m-win_amd64.whl", hash = "sha256:ff18b8d1a784b810df0b0fff3bcb50ab941c3b8e2c8de5726f9c71c601c611aa"}, + {file = "psutil-5.9.6-cp37-abi3-win32.whl", hash = "sha256:a6f01f03bf1843280f4ad16f4bde26b817847b4c1a0db59bf6419807bc5ce05c"}, + {file = "psutil-5.9.6-cp37-abi3-win_amd64.whl", hash = "sha256:6e5fb8dc711a514da83098bc5234264e551ad980cec5f85dabf4d38ed6f15e9a"}, + {file = "psutil-5.9.6-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:daecbcbd29b289aac14ece28eca6a3e60aa361754cf6da3dfb20d4d32b6c7f57"}, + {file = "psutil-5.9.6.tar.gz", hash = "sha256:e4b92ddcd7dd4cdd3f900180ea1e104932c7bce234fb88976e2a3b296441225a"}, +] + +[package.extras] +test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] + +[[package]] +name = "psycopg2" +version = "2.9.9" +description = "psycopg2 - Python-PostgreSQL Database Adapter" +optional = false +python-versions = ">=3.7" +files = [ + {file = "psycopg2-2.9.9-cp310-cp310-win32.whl", hash = "sha256:38a8dcc6856f569068b47de286b472b7c473ac7977243593a288ebce0dc89516"}, + {file = "psycopg2-2.9.9-cp310-cp310-win_amd64.whl", hash = "sha256:426f9f29bde126913a20a96ff8ce7d73fd8a216cfb323b1f04da402d452853c3"}, + {file = "psycopg2-2.9.9-cp311-cp311-win32.whl", hash = "sha256:ade01303ccf7ae12c356a5e10911c9e1c51136003a9a1d92f7aa9d010fb98372"}, + {file = "psycopg2-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:121081ea2e76729acfb0673ff33755e8703d45e926e416cb59bae3a86c6a4981"}, + {file = "psycopg2-2.9.9-cp37-cp37m-win32.whl", hash = "sha256:5e0d98cade4f0e0304d7d6f25bbfbc5bd186e07b38eac65379309c4ca3193efa"}, + {file = "psycopg2-2.9.9-cp37-cp37m-win_amd64.whl", hash = "sha256:7e2dacf8b009a1c1e843b5213a87f7c544b2b042476ed7755be813eaf4e8347a"}, + {file = "psycopg2-2.9.9-cp38-cp38-win32.whl", hash = 
"sha256:ff432630e510709564c01dafdbe996cb552e0b9f3f065eb89bdce5bd31fabf4c"}, + {file = "psycopg2-2.9.9-cp38-cp38-win_amd64.whl", hash = "sha256:bac58c024c9922c23550af2a581998624d6e02350f4ae9c5f0bc642c633a2d5e"}, + {file = "psycopg2-2.9.9-cp39-cp39-win32.whl", hash = "sha256:c92811b2d4c9b6ea0285942b2e7cac98a59e166d59c588fe5cfe1eda58e72d59"}, + {file = "psycopg2-2.9.9-cp39-cp39-win_amd64.whl", hash = "sha256:de80739447af31525feddeb8effd640782cf5998e1a4e9192ebdf829717e3913"}, + {file = "psycopg2-2.9.9.tar.gz", hash = "sha256:d1454bde93fb1e224166811694d600e746430c006fbb031ea06ecc2ea41bf156"}, +] + +[[package]] +name = "ptyprocess" +version = "0.7.0" +description = "Run a subprocess in a pseudo terminal" +optional = false +python-versions = "*" +files = [ + {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, + {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, +] + +[[package]] +name = "pure-eval" +version = "0.2.2" +description = "Safely evaluate AST nodes without side effects" +optional = false +python-versions = "*" +files = [ + {file = "pure_eval-0.2.2-py3-none-any.whl", hash = "sha256:01eaab343580944bc56080ebe0a674b39ec44a945e6d09ba7db3cb8cec289350"}, + {file = "pure_eval-0.2.2.tar.gz", hash = "sha256:2b45320af6dfaa1750f543d714b6d1c520a1688dec6fd24d339063ce0aaa9ac3"}, +] + +[package.extras] +tests = ["pytest"] + +[[package]] +name = "pyarrow" +version = "11.0.0" +description = "Python library for Apache Arrow" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pyarrow-11.0.0-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:40bb42afa1053c35c749befbe72f6429b7b5f45710e85059cdd534553ebcf4f2"}, + {file = "pyarrow-11.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7c28b5f248e08dea3b3e0c828b91945f431f4202f1a9fe84d1012a761324e1ba"}, + {file = "pyarrow-11.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a37bc81f6c9435da3c9c1e767324ac3064ffbe110c4e460660c43e144be4ed85"}, + {file = "pyarrow-11.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad7c53def8dbbc810282ad308cc46a523ec81e653e60a91c609c2233ae407689"}, + {file = "pyarrow-11.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:25aa11c443b934078bfd60ed63e4e2d42461682b5ac10f67275ea21e60e6042c"}, + {file = "pyarrow-11.0.0-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:e217d001e6389b20a6759392a5ec49d670757af80101ee6b5f2c8ff0172e02ca"}, + {file = "pyarrow-11.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ad42bb24fc44c48f74f0d8c72a9af16ba9a01a2ccda5739a517aa860fa7e3d56"}, + {file = "pyarrow-11.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d942c690ff24a08b07cb3df818f542a90e4d359381fbff71b8f2aea5bf58841"}, + {file = "pyarrow-11.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f010ce497ca1b0f17a8243df3048055c0d18dcadbcc70895d5baf8921f753de5"}, + {file = "pyarrow-11.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:2f51dc7ca940fdf17893227edb46b6784d37522ce08d21afc56466898cb213b2"}, + {file = "pyarrow-11.0.0-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:1cbcfcbb0e74b4d94f0b7dde447b835a01bc1d16510edb8bb7d6224b9bf5bafc"}, + {file = "pyarrow-11.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaee8f79d2a120bf3e032d6d64ad20b3af6f56241b0ffc38d201aebfee879d00"}, + {file = 
"pyarrow-11.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:410624da0708c37e6a27eba321a72f29d277091c8f8d23f72c92bada4092eb5e"}, + {file = "pyarrow-11.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:2d53ba72917fdb71e3584ffc23ee4fcc487218f8ff29dd6df3a34c5c48fe8c06"}, + {file = "pyarrow-11.0.0-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:f12932e5a6feb5c58192209af1d2607d488cb1d404fbc038ac12ada60327fa34"}, + {file = "pyarrow-11.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:41a1451dd895c0b2964b83d91019e46f15b5564c7ecd5dcb812dadd3f05acc97"}, + {file = "pyarrow-11.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:becc2344be80e5dce4e1b80b7c650d2fc2061b9eb339045035a1baa34d5b8f1c"}, + {file = "pyarrow-11.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f40be0d7381112a398b93c45a7e69f60261e7b0269cc324e9f739ce272f4f70"}, + {file = "pyarrow-11.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:362a7c881b32dc6b0eccf83411a97acba2774c10edcec715ccaab5ebf3bb0835"}, + {file = "pyarrow-11.0.0-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:ccbf29a0dadfcdd97632b4f7cca20a966bb552853ba254e874c66934931b9841"}, + {file = "pyarrow-11.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3e99be85973592051e46412accea31828da324531a060bd4585046a74ba45854"}, + {file = "pyarrow-11.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69309be84dcc36422574d19c7d3a30a7ea43804f12552356d1ab2a82a713c418"}, + {file = "pyarrow-11.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da93340fbf6f4e2a62815064383605b7ffa3e9eeb320ec839995b1660d69f89b"}, + {file = "pyarrow-11.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:caad867121f182d0d3e1a0d36f197df604655d0b466f1bc9bafa903aa95083e4"}, + {file = "pyarrow-11.0.0.tar.gz", hash = "sha256:5461c57dbdb211a632a48facb9b39bbeb8a7905ec95d768078525283caef5f6d"}, +] + +[package.dependencies] +numpy = ">=1.16.6" + +[[package]] +name = "pycodestyle" +version = "2.11.1" +description = "Python style guide checker" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pycodestyle-2.11.1-py2.py3-none-any.whl", hash = "sha256:44fe31000b2d866f2e41841b18528a505fbd7fef9017b04eff4e2648a0fadc67"}, + {file = "pycodestyle-2.11.1.tar.gz", hash = "sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f"}, +] + +[[package]] +name = "pycparser" +version = "2.21" +description = "C parser in Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, + {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, +] + +[[package]] +name = "pyflakes" +version = "3.1.0" +description = "passive checker of Python programs" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyflakes-3.1.0-py2.py3-none-any.whl", hash = "sha256:4132f6d49cb4dae6819e5379898f2b8cce3c5f23994194c24b77d5da2e36f774"}, + {file = "pyflakes-3.1.0.tar.gz", hash = "sha256:a0aae034c444db0071aa077972ba4768d40c830d9539fd45bf4cd3f8f6992efc"}, +] + +[[package]] +name = "pygments" +version = "2.16.1" +description = "Pygments is a syntax highlighting package written in Python." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "Pygments-2.16.1-py3-none-any.whl", hash = "sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692"}, + {file = "Pygments-2.16.1.tar.gz", hash = "sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29"}, +] + +[package.extras] +plugins = ["importlib-metadata"] + +[[package]] +name = "pylint" +version = "3.0.1" +description = "python code static checker" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "pylint-3.0.1-py3-none-any.whl", hash = "sha256:9c90b89e2af7809a1697f6f5f93f1d0e518ac566e2ac4d2af881a69c13ad01ea"}, + {file = "pylint-3.0.1.tar.gz", hash = "sha256:81c6125637be216b4652ae50cc42b9f8208dfb725cdc7e04c48f6902f4dbdf40"}, +] + +[package.dependencies] +astroid = ">=3.0.0,<=3.1.0-dev0" +colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} +dill = {version = ">=0.2", markers = "python_version < \"3.11\""} +isort = ">=4.2.5,<6" +mccabe = ">=0.6,<0.8" +platformdirs = ">=2.2.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +tomlkit = ">=0.10.1" +typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""} + +[package.extras] +spelling = ["pyenchant (>=3.2,<4.0)"] +testutils = ["gitpython (>3)"] + +[[package]] +name = "pytest" +version = "7.4.2" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-7.4.2-py3-none-any.whl", hash = "sha256:1d881c6124e08ff0a1bb75ba3ec0bfd8b5354a01c194ddd5a0a870a48d99b002"}, + {file = "pytest-7.4.2.tar.gz", hash = "sha256:a766259cfab564a2ad52cb1aae1b881a75c3eb7e34ca3779697c23ed47c47069"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-cov" +version = "3.0.0" +description = "Pytest plugin for measuring coverage." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "pytest-cov-3.0.0.tar.gz", hash = "sha256:e7f0f5b1617d2210a2cabc266dfe2f4c75a8d32fb89eafb7ad9d06f6d076d470"}, + {file = "pytest_cov-3.0.0-py3-none-any.whl", hash = "sha256:578d5d15ac4a25e5f961c938b85a05b09fdaae9deef3bb6de9a6e766622ca7a6"}, +] + +[package.dependencies] +coverage = {version = ">=5.2.1", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] + +[[package]] +name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "pytz" +version = "2023.3.post1" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +files = [ + {file = "pytz-2023.3.post1-py2.py3-none-any.whl", hash = "sha256:ce42d816b81b68506614c11e8937d3aa9e41007ceb50bfdcb0749b921bf646c7"}, + {file = "pytz-2023.3.post1.tar.gz", hash = "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b"}, +] + +[[package]] +name = "pyupgrade" +version = "3.15.0" +description = "A tool to automatically upgrade syntax for newer versions." +optional = false +python-versions = ">=3.8.1" +files = [ + {file = "pyupgrade-3.15.0-py2.py3-none-any.whl", hash = "sha256:8dc8ebfaed43566e2c65994162795017c7db11f531558a74bc8aa077907bc305"}, + {file = "pyupgrade-3.15.0.tar.gz", hash = "sha256:a7fde381060d7c224f55aef7a30fae5ac93bbc428367d27e70a603bc2acd4f00"}, +] + +[package.dependencies] +tokenize-rt = ">=5.2.0" + +[[package]] +name = "pywin32" +version = "306" +description = "Python for Window Extensions" +optional = false +python-versions = "*" +files = [ + {file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"}, + {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"}, + {file = "pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"}, + {file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"}, + {file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"}, + {file = "pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b"}, + {file = "pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e"}, + {file = "pywin32-306-cp312-cp312-win_arm64.whl", hash = "sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040"}, + {file = "pywin32-306-cp37-cp37m-win32.whl", hash = "sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65"}, + {file = "pywin32-306-cp37-cp37m-win_amd64.whl", hash = "sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36"}, + {file = "pywin32-306-cp38-cp38-win32.whl", hash = 
"sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a"}, + {file = "pywin32-306-cp38-cp38-win_amd64.whl", hash = "sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0"}, + {file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"}, + {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", 
hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = 
"PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "pyzmq" +version = "25.1.1" +description = "Python bindings for 0MQ" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pyzmq-25.1.1-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:381469297409c5adf9a0e884c5eb5186ed33137badcbbb0560b86e910a2f1e76"}, + {file = "pyzmq-25.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:955215ed0604dac5b01907424dfa28b40f2b2292d6493445dd34d0dfa72586a8"}, + {file = "pyzmq-25.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:985bbb1316192b98f32e25e7b9958088431d853ac63aca1d2c236f40afb17c83"}, + {file = "pyzmq-25.1.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:afea96f64efa98df4da6958bae37f1cbea7932c35878b185e5982821bc883369"}, + {file = "pyzmq-25.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76705c9325d72a81155bb6ab48d4312e0032bf045fb0754889133200f7a0d849"}, + {file = "pyzmq-25.1.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:77a41c26205d2353a4c94d02be51d6cbdf63c06fbc1295ea57dad7e2d3381b71"}, + {file = "pyzmq-25.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:12720a53e61c3b99d87262294e2b375c915fea93c31fc2336898c26d7aed34cd"}, + {file = "pyzmq-25.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:57459b68e5cd85b0be8184382cefd91959cafe79ae019e6b1ae6e2ba8a12cda7"}, + {file = "pyzmq-25.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:292fe3fc5ad4a75bc8df0dfaee7d0babe8b1f4ceb596437213821f761b4589f9"}, + {file = "pyzmq-25.1.1-cp310-cp310-win32.whl", hash = "sha256:35b5ab8c28978fbbb86ea54958cd89f5176ce747c1fb3d87356cf698048a7790"}, + {file = "pyzmq-25.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:11baebdd5fc5b475d484195e49bae2dc64b94a5208f7c89954e9e354fc609d8f"}, + {file = "pyzmq-25.1.1-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:d20a0ddb3e989e8807d83225a27e5c2eb2260eaa851532086e9e0fa0d5287d83"}, + {file = "pyzmq-25.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e1c1be77bc5fb77d923850f82e55a928f8638f64a61f00ff18a67c7404faf008"}, + {file = "pyzmq-25.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d89528b4943d27029a2818f847c10c2cecc79fa9590f3cb1860459a5be7933eb"}, + {file = "pyzmq-25.1.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:90f26dc6d5f241ba358bef79be9ce06de58d477ca8485e3291675436d3827cf8"}, + {file = "pyzmq-25.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:c2b92812bd214018e50b6380ea3ac0c8bb01ac07fcc14c5f86a5bb25e74026e9"}, + {file = "pyzmq-25.1.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:2f957ce63d13c28730f7fd6b72333814221c84ca2421298f66e5143f81c9f91f"}, + {file = "pyzmq-25.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:047a640f5c9c6ade7b1cc6680a0e28c9dd5a0825135acbd3569cc96ea00b2505"}, + {file = "pyzmq-25.1.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7f7e58effd14b641c5e4dec8c7dab02fb67a13df90329e61c869b9cc607ef752"}, + {file = "pyzmq-25.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c2910967e6ab16bf6fbeb1f771c89a7050947221ae12a5b0b60f3bca2ee19bca"}, + {file = "pyzmq-25.1.1-cp311-cp311-win32.whl", hash = "sha256:76c1c8efb3ca3a1818b837aea423ff8a07bbf7aafe9f2f6582b61a0458b1a329"}, + {file = "pyzmq-25.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:44e58a0554b21fc662f2712814a746635ed668d0fbc98b7cb9d74cb798d202e6"}, + {file = "pyzmq-25.1.1-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:e1ffa1c924e8c72778b9ccd386a7067cddf626884fd8277f503c48bb5f51c762"}, + {file = "pyzmq-25.1.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:1af379b33ef33757224da93e9da62e6471cf4a66d10078cf32bae8127d3d0d4a"}, + {file = "pyzmq-25.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cff084c6933680d1f8b2f3b4ff5bbb88538a4aac00d199ac13f49d0698727ecb"}, + {file = "pyzmq-25.1.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2400a94f7dd9cb20cd012951a0cbf8249e3d554c63a9c0cdfd5cbb6c01d2dec"}, + {file = "pyzmq-25.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d81f1ddae3858b8299d1da72dd7d19dd36aab654c19671aa8a7e7fb02f6638a"}, + {file = "pyzmq-25.1.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:255ca2b219f9e5a3a9ef3081512e1358bd4760ce77828e1028b818ff5610b87b"}, + {file = "pyzmq-25.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a882ac0a351288dd18ecae3326b8a49d10c61a68b01419f3a0b9a306190baf69"}, + {file = "pyzmq-25.1.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:724c292bb26365659fc434e9567b3f1adbdb5e8d640c936ed901f49e03e5d32e"}, + {file = "pyzmq-25.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ca1ed0bb2d850aa8471387882247c68f1e62a4af0ce9c8a1dbe0d2bf69e41fb"}, + {file = "pyzmq-25.1.1-cp312-cp312-win32.whl", hash = "sha256:b3451108ab861040754fa5208bca4a5496c65875710f76789a9ad27c801a0075"}, + {file = "pyzmq-25.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:eadbefd5e92ef8a345f0525b5cfd01cf4e4cc651a2cffb8f23c0dd184975d787"}, + {file = "pyzmq-25.1.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:db0b2af416ba735c6304c47f75d348f498b92952f5e3e8bff449336d2728795d"}, + {file = "pyzmq-25.1.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7c133e93b405eb0d36fa430c94185bdd13c36204a8635470cccc200723c13bb"}, + {file = "pyzmq-25.1.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:273bc3959bcbff3f48606b28229b4721716598d76b5aaea2b4a9d0ab454ec062"}, + {file = "pyzmq-25.1.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:cbc8df5c6a88ba5ae385d8930da02201165408dde8d8322072e3e5ddd4f68e22"}, + {file = "pyzmq-25.1.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:18d43df3f2302d836f2a56f17e5663e398416e9dd74b205b179065e61f1a6edf"}, + {file = "pyzmq-25.1.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:73461eed88a88c866656e08f89299720a38cb4e9d34ae6bf5df6f71102570f2e"}, + {file = 
"pyzmq-25.1.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:34c850ce7976d19ebe7b9d4b9bb8c9dfc7aac336c0958e2651b88cbd46682123"}, + {file = "pyzmq-25.1.1-cp36-cp36m-win32.whl", hash = "sha256:d2045d6d9439a0078f2a34b57c7b18c4a6aef0bee37f22e4ec9f32456c852c71"}, + {file = "pyzmq-25.1.1-cp36-cp36m-win_amd64.whl", hash = "sha256:458dea649f2f02a0b244ae6aef8dc29325a2810aa26b07af8374dc2a9faf57e3"}, + {file = "pyzmq-25.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7cff25c5b315e63b07a36f0c2bab32c58eafbe57d0dce61b614ef4c76058c115"}, + {file = "pyzmq-25.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1579413ae492b05de5a6174574f8c44c2b9b122a42015c5292afa4be2507f28"}, + {file = "pyzmq-25.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3d0a409d3b28607cc427aa5c30a6f1e4452cc44e311f843e05edb28ab5e36da0"}, + {file = "pyzmq-25.1.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:21eb4e609a154a57c520e3d5bfa0d97e49b6872ea057b7c85257b11e78068222"}, + {file = "pyzmq-25.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:034239843541ef7a1aee0c7b2cb7f6aafffb005ede965ae9cbd49d5ff4ff73cf"}, + {file = "pyzmq-25.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f8115e303280ba09f3898194791a153862cbf9eef722ad8f7f741987ee2a97c7"}, + {file = "pyzmq-25.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:1a5d26fe8f32f137e784f768143728438877d69a586ddeaad898558dc971a5ae"}, + {file = "pyzmq-25.1.1-cp37-cp37m-win32.whl", hash = "sha256:f32260e556a983bc5c7ed588d04c942c9a8f9c2e99213fec11a031e316874c7e"}, + {file = "pyzmq-25.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:abf34e43c531bbb510ae7e8f5b2b1f2a8ab93219510e2b287a944432fad135f3"}, + {file = "pyzmq-25.1.1-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:87e34f31ca8f168c56d6fbf99692cc8d3b445abb5bfd08c229ae992d7547a92a"}, + {file = "pyzmq-25.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c9c6c9b2c2f80747a98f34ef491c4d7b1a8d4853937bb1492774992a120f475d"}, + {file = "pyzmq-25.1.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5619f3f5a4db5dbb572b095ea3cb5cc035335159d9da950830c9c4db2fbb6995"}, + {file = "pyzmq-25.1.1-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5a34d2395073ef862b4032343cf0c32a712f3ab49d7ec4f42c9661e0294d106f"}, + {file = "pyzmq-25.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25f0e6b78220aba09815cd1f3a32b9c7cb3e02cb846d1cfc526b6595f6046618"}, + {file = "pyzmq-25.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3669cf8ee3520c2f13b2e0351c41fea919852b220988d2049249db10046a7afb"}, + {file = "pyzmq-25.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2d163a18819277e49911f7461567bda923461c50b19d169a062536fffe7cd9d2"}, + {file = "pyzmq-25.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:df27ffddff4190667d40de7beba4a950b5ce78fe28a7dcc41d6f8a700a80a3c0"}, + {file = "pyzmq-25.1.1-cp38-cp38-win32.whl", hash = "sha256:a382372898a07479bd34bda781008e4a954ed8750f17891e794521c3e21c2e1c"}, + {file = "pyzmq-25.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:52533489f28d62eb1258a965f2aba28a82aa747202c8fa5a1c7a43b5db0e85c1"}, + {file = "pyzmq-25.1.1-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:03b3f49b57264909aacd0741892f2aecf2f51fb053e7d8ac6767f6c700832f45"}, + {file = "pyzmq-25.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:330f9e188d0d89080cde66dc7470f57d1926ff2fb5576227f14d5be7ab30b9fa"}, + {file = 
"pyzmq-25.1.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2ca57a5be0389f2a65e6d3bb2962a971688cbdd30b4c0bd188c99e39c234f414"}, + {file = "pyzmq-25.1.1-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d457aed310f2670f59cc5b57dcfced452aeeed77f9da2b9763616bd57e4dbaae"}, + {file = "pyzmq-25.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c56d748ea50215abef7030c72b60dd723ed5b5c7e65e7bc2504e77843631c1a6"}, + {file = "pyzmq-25.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8f03d3f0d01cb5a018debeb412441996a517b11c5c17ab2001aa0597c6d6882c"}, + {file = "pyzmq-25.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:820c4a08195a681252f46926de10e29b6bbf3e17b30037bd4250d72dd3ddaab8"}, + {file = "pyzmq-25.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:17ef5f01d25b67ca8f98120d5fa1d21efe9611604e8eb03a5147360f517dd1e2"}, + {file = "pyzmq-25.1.1-cp39-cp39-win32.whl", hash = "sha256:04ccbed567171579ec2cebb9c8a3e30801723c575601f9a990ab25bcac6b51e2"}, + {file = "pyzmq-25.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:e61f091c3ba0c3578411ef505992d356a812fb200643eab27f4f70eed34a29ef"}, + {file = "pyzmq-25.1.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ade6d25bb29c4555d718ac6d1443a7386595528c33d6b133b258f65f963bb0f6"}, + {file = "pyzmq-25.1.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0c95ddd4f6e9fca4e9e3afaa4f9df8552f0ba5d1004e89ef0a68e1f1f9807c7"}, + {file = "pyzmq-25.1.1-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:48e466162a24daf86f6b5ca72444d2bf39a5e58da5f96370078be67c67adc978"}, + {file = "pyzmq-25.1.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abc719161780932c4e11aaebb203be3d6acc6b38d2f26c0f523b5b59d2fc1996"}, + {file = "pyzmq-25.1.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:1ccf825981640b8c34ae54231b7ed00271822ea1c6d8ba1090ebd4943759abf5"}, + {file = "pyzmq-25.1.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c2f20ce161ebdb0091a10c9ca0372e023ce24980d0e1f810f519da6f79c60800"}, + {file = "pyzmq-25.1.1-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:deee9ca4727f53464daf089536e68b13e6104e84a37820a88b0a057b97bba2d2"}, + {file = "pyzmq-25.1.1-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:aa8d6cdc8b8aa19ceb319aaa2b660cdaccc533ec477eeb1309e2a291eaacc43a"}, + {file = "pyzmq-25.1.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:019e59ef5c5256a2c7378f2fb8560fc2a9ff1d315755204295b2eab96b254d0a"}, + {file = "pyzmq-25.1.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:b9af3757495c1ee3b5c4e945c1df7be95562277c6e5bccc20a39aec50f826cd0"}, + {file = "pyzmq-25.1.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:548d6482dc8aadbe7e79d1b5806585c8120bafa1ef841167bc9090522b610fa6"}, + {file = "pyzmq-25.1.1-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:057e824b2aae50accc0f9a0570998adc021b372478a921506fddd6c02e60308e"}, + {file = "pyzmq-25.1.1-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2243700cc5548cff20963f0ca92d3e5e436394375ab8a354bbea2b12911b20b0"}, + {file = "pyzmq-25.1.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79986f3b4af059777111409ee517da24a529bdbd46da578b33f25580adcff728"}, + {file = "pyzmq-25.1.1-pp38-pypy38_pp73-win_amd64.whl", hash = 
"sha256:11d58723d44d6ed4dd677c5615b2ffb19d5c426636345567d6af82be4dff8a55"}, + {file = "pyzmq-25.1.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:49d238cf4b69652257db66d0c623cd3e09b5d2e9576b56bc067a396133a00d4a"}, + {file = "pyzmq-25.1.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fedbdc753827cf014c01dbbee9c3be17e5a208dcd1bf8641ce2cd29580d1f0d4"}, + {file = "pyzmq-25.1.1-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bc16ac425cc927d0a57d242589f87ee093884ea4804c05a13834d07c20db203c"}, + {file = "pyzmq-25.1.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11c1d2aed9079c6b0c9550a7257a836b4a637feb334904610f06d70eb44c56d2"}, + {file = "pyzmq-25.1.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e8a701123029cc240cea61dd2d16ad57cab4691804143ce80ecd9286b464d180"}, + {file = "pyzmq-25.1.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:61706a6b6c24bdece85ff177fec393545a3191eeda35b07aaa1458a027ad1304"}, + {file = "pyzmq-25.1.1.tar.gz", hash = "sha256:259c22485b71abacdfa8bf79720cd7bcf4b9d128b30ea554f01ae71fdbfdaa23"}, +] + +[package.dependencies] +cffi = {version = "*", markers = "implementation_name == \"pypy\""} + +[[package]] +name = "referencing" +version = "0.30.2" +description = "JSON Referencing + Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "referencing-0.30.2-py3-none-any.whl", hash = "sha256:449b6669b6121a9e96a7f9e410b245d471e8d48964c67113ce9afe50c8dd7bdf"}, + {file = "referencing-0.30.2.tar.gz", hash = "sha256:794ad8003c65938edcdbc027f1933215e0d0ccc0291e3ce20a4d87432b59efc0"}, +] + +[package.dependencies] +attrs = ">=22.2.0" +rpds-py = ">=0.7.0" + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "rpds-py" +version = "0.10.6" +description = "Python bindings to Rust's persistent data structures (rpds)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "rpds_py-0.10.6-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:6bdc11f9623870d75692cc33c59804b5a18d7b8a4b79ef0b00b773a27397d1f6"}, + {file = "rpds_py-0.10.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:26857f0f44f0e791f4a266595a7a09d21f6b589580ee0585f330aaccccb836e3"}, + {file = "rpds_py-0.10.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7f5e15c953ace2e8dde9824bdab4bec50adb91a5663df08d7d994240ae6fa31"}, + {file = "rpds_py-0.10.6-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:61fa268da6e2e1cd350739bb61011121fa550aa2545762e3dc02ea177ee4de35"}, + {file = "rpds_py-0.10.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c48f3fbc3e92c7dd6681a258d22f23adc2eb183c8cb1557d2fcc5a024e80b094"}, + {file = "rpds_py-0.10.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0503c5b681566e8b722fe8c4c47cce5c7a51f6935d5c7012c4aefe952a35eed"}, + {file = "rpds_py-0.10.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:734c41f9f57cc28658d98270d3436dba65bed0cfc730d115b290e970150c540d"}, + {file = "rpds_py-0.10.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a5d7ed104d158c0042a6a73799cf0eb576dfd5fc1ace9c47996e52320c37cb7c"}, + {file = "rpds_py-0.10.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e3df0bc35e746cce42579826b89579d13fd27c3d5319a6afca9893a9b784ff1b"}, + {file = "rpds_py-0.10.6-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:73e0a78a9b843b8c2128028864901f55190401ba38aae685350cf69b98d9f7c9"}, + {file = "rpds_py-0.10.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5ed505ec6305abd2c2c9586a7b04fbd4baf42d4d684a9c12ec6110deefe2a063"}, + {file = "rpds_py-0.10.6-cp310-none-win32.whl", hash = "sha256:d97dd44683802000277bbf142fd9f6b271746b4846d0acaf0cefa6b2eaf2a7ad"}, + {file = "rpds_py-0.10.6-cp310-none-win_amd64.whl", hash = "sha256:b455492cab07107bfe8711e20cd920cc96003e0da3c1f91297235b1603d2aca7"}, + {file = "rpds_py-0.10.6-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:e8cdd52744f680346ff8c1ecdad5f4d11117e1724d4f4e1874f3a67598821069"}, + {file = "rpds_py-0.10.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:66414dafe4326bca200e165c2e789976cab2587ec71beb80f59f4796b786a238"}, + {file = "rpds_py-0.10.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc435d059f926fdc5b05822b1be4ff2a3a040f3ae0a7bbbe672babb468944722"}, + {file = "rpds_py-0.10.6-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8e7f2219cb72474571974d29a191714d822e58be1eb171f229732bc6fdedf0ac"}, + {file = "rpds_py-0.10.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3953c6926a63f8ea5514644b7afb42659b505ece4183fdaaa8f61d978754349e"}, + {file 
= "rpds_py-0.10.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2bb2e4826be25e72013916eecd3d30f66fd076110de09f0e750163b416500721"}, + {file = "rpds_py-0.10.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bf347b495b197992efc81a7408e9a83b931b2f056728529956a4d0858608b80"}, + {file = "rpds_py-0.10.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:102eac53bb0bf0f9a275b438e6cf6904904908562a1463a6fc3323cf47d7a532"}, + {file = "rpds_py-0.10.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:40f93086eef235623aa14dbddef1b9fb4b22b99454cb39a8d2e04c994fb9868c"}, + {file = "rpds_py-0.10.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e22260a4741a0e7a206e175232867b48a16e0401ef5bce3c67ca5b9705879066"}, + {file = "rpds_py-0.10.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f4e56860a5af16a0fcfa070a0a20c42fbb2012eed1eb5ceeddcc7f8079214281"}, + {file = "rpds_py-0.10.6-cp311-none-win32.whl", hash = "sha256:0774a46b38e70fdde0c6ded8d6d73115a7c39d7839a164cc833f170bbf539116"}, + {file = "rpds_py-0.10.6-cp311-none-win_amd64.whl", hash = "sha256:4a5ee600477b918ab345209eddafde9f91c0acd931f3776369585a1c55b04c57"}, + {file = "rpds_py-0.10.6-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:5ee97c683eaface61d38ec9a489e353d36444cdebb128a27fe486a291647aff6"}, + {file = "rpds_py-0.10.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0713631d6e2d6c316c2f7b9320a34f44abb644fc487b77161d1724d883662e31"}, + {file = "rpds_py-0.10.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5a53f5998b4bbff1cb2e967e66ab2addc67326a274567697379dd1e326bded7"}, + {file = "rpds_py-0.10.6-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6a555ae3d2e61118a9d3e549737bb4a56ff0cec88a22bd1dfcad5b4e04759175"}, + {file = "rpds_py-0.10.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:945eb4b6bb8144909b203a88a35e0a03d22b57aefb06c9b26c6e16d72e5eb0f0"}, + {file = "rpds_py-0.10.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:52c215eb46307c25f9fd2771cac8135d14b11a92ae48d17968eda5aa9aaf5071"}, + {file = "rpds_py-0.10.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1b3cd23d905589cb205710b3988fc8f46d4a198cf12862887b09d7aaa6bf9b9"}, + {file = "rpds_py-0.10.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64ccc28683666672d7c166ed465c09cee36e306c156e787acef3c0c62f90da5a"}, + {file = "rpds_py-0.10.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:516a611a2de12fbea70c78271e558f725c660ce38e0006f75139ba337d56b1f6"}, + {file = "rpds_py-0.10.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:9ff93d3aedef11f9c4540cf347f8bb135dd9323a2fc705633d83210d464c579d"}, + {file = "rpds_py-0.10.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d858532212f0650be12b6042ff4378dc2efbb7792a286bee4489eaa7ba010586"}, + {file = "rpds_py-0.10.6-cp312-none-win32.whl", hash = "sha256:3c4eff26eddac49d52697a98ea01b0246e44ca82ab09354e94aae8823e8bda02"}, + {file = "rpds_py-0.10.6-cp312-none-win_amd64.whl", hash = "sha256:150eec465dbc9cbca943c8e557a21afdcf9bab8aaabf386c44b794c2f94143d2"}, + {file = "rpds_py-0.10.6-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:cf693eb4a08eccc1a1b636e4392322582db2a47470d52e824b25eca7a3977b53"}, + {file = "rpds_py-0.10.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4134aa2342f9b2ab6c33d5c172e40f9ef802c61bb9ca30d21782f6e035ed0043"}, + {file = 
"rpds_py-0.10.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e782379c2028a3611285a795b89b99a52722946d19fc06f002f8b53e3ea26ea9"}, + {file = "rpds_py-0.10.6-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2f6da6d842195fddc1cd34c3da8a40f6e99e4a113918faa5e60bf132f917c247"}, + {file = "rpds_py-0.10.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b4a9fe992887ac68256c930a2011255bae0bf5ec837475bc6f7edd7c8dfa254e"}, + {file = "rpds_py-0.10.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b788276a3c114e9f51e257f2a6f544c32c02dab4aa7a5816b96444e3f9ffc336"}, + {file = "rpds_py-0.10.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:caa1afc70a02645809c744eefb7d6ee8fef7e2fad170ffdeacca267fd2674f13"}, + {file = "rpds_py-0.10.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bddd4f91eede9ca5275e70479ed3656e76c8cdaaa1b354e544cbcf94c6fc8ac4"}, + {file = "rpds_py-0.10.6-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:775049dfa63fb58293990fc59473e659fcafd953bba1d00fc5f0631a8fd61977"}, + {file = "rpds_py-0.10.6-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:c6c45a2d2b68c51fe3d9352733fe048291e483376c94f7723458cfd7b473136b"}, + {file = "rpds_py-0.10.6-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0699ab6b8c98df998c3eacf51a3b25864ca93dab157abe358af46dc95ecd9801"}, + {file = "rpds_py-0.10.6-cp38-none-win32.whl", hash = "sha256:ebdab79f42c5961682654b851f3f0fc68e6cc7cd8727c2ac4ffff955154123c1"}, + {file = "rpds_py-0.10.6-cp38-none-win_amd64.whl", hash = "sha256:24656dc36f866c33856baa3ab309da0b6a60f37d25d14be916bd3e79d9f3afcf"}, + {file = "rpds_py-0.10.6-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:0898173249141ee99ffcd45e3829abe7bcee47d941af7434ccbf97717df020e5"}, + {file = "rpds_py-0.10.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9e9184fa6c52a74a5521e3e87badbf9692549c0fcced47443585876fcc47e469"}, + {file = "rpds_py-0.10.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5752b761902cd15073a527b51de76bbae63d938dc7c5c4ad1e7d8df10e765138"}, + {file = "rpds_py-0.10.6-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:99a57006b4ec39dbfb3ed67e5b27192792ffb0553206a107e4aadb39c5004cd5"}, + {file = "rpds_py-0.10.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09586f51a215d17efdb3a5f090d7cbf1633b7f3708f60a044757a5d48a83b393"}, + {file = "rpds_py-0.10.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e225a6a14ecf44499aadea165299092ab0cba918bb9ccd9304eab1138844490b"}, + {file = "rpds_py-0.10.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2039f8d545f20c4e52713eea51a275e62153ee96c8035a32b2abb772b6fc9e5"}, + {file = "rpds_py-0.10.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:34ad87a831940521d462ac11f1774edf867c34172010f5390b2f06b85dcc6014"}, + {file = "rpds_py-0.10.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dcdc88b6b01015da066da3fb76545e8bb9a6880a5ebf89e0f0b2e3ca557b3ab7"}, + {file = "rpds_py-0.10.6-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:25860ed5c4e7f5e10c496ea78af46ae8d8468e0be745bd233bab9ca99bfd2647"}, + {file = "rpds_py-0.10.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7854a207ef77319ec457c1eb79c361b48807d252d94348305db4f4b62f40f7f3"}, + {file = "rpds_py-0.10.6-cp39-none-win32.whl", hash = "sha256:e6fcc026a3f27c1282c7ed24b7fcac82cdd70a0e84cc848c0841a3ab1e3dea2d"}, + 
{file = "rpds_py-0.10.6-cp39-none-win_amd64.whl", hash = "sha256:e98c4c07ee4c4b3acf787e91b27688409d918212dfd34c872201273fdd5a0e18"}, + {file = "rpds_py-0.10.6-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:68fe9199184c18d997d2e4293b34327c0009a78599ce703e15cd9a0f47349bba"}, + {file = "rpds_py-0.10.6-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:3339eca941568ed52d9ad0f1b8eb9fe0958fa245381747cecf2e9a78a5539c42"}, + {file = "rpds_py-0.10.6-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a360cfd0881d36c6dc271992ce1eda65dba5e9368575663de993eeb4523d895f"}, + {file = "rpds_py-0.10.6-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:031f76fc87644a234883b51145e43985aa2d0c19b063e91d44379cd2786144f8"}, + {file = "rpds_py-0.10.6-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f36a9d751f86455dc5278517e8b65580eeee37d61606183897f122c9e51cef3"}, + {file = "rpds_py-0.10.6-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:052a832078943d2b2627aea0d19381f607fe331cc0eb5df01991268253af8417"}, + {file = "rpds_py-0.10.6-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:023574366002bf1bd751ebaf3e580aef4a468b3d3c216d2f3f7e16fdabd885ed"}, + {file = "rpds_py-0.10.6-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:defa2c0c68734f4a82028c26bcc85e6b92cced99866af118cd6a89b734ad8e0d"}, + {file = "rpds_py-0.10.6-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:879fb24304ead6b62dbe5034e7b644b71def53c70e19363f3c3be2705c17a3b4"}, + {file = "rpds_py-0.10.6-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:53c43e10d398e365da2d4cc0bcaf0854b79b4c50ee9689652cdc72948e86f487"}, + {file = "rpds_py-0.10.6-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:3777cc9dea0e6c464e4b24760664bd8831738cc582c1d8aacf1c3f546bef3f65"}, + {file = "rpds_py-0.10.6-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:40578a6469e5d1df71b006936ce95804edb5df47b520c69cf5af264d462f2cbb"}, + {file = "rpds_py-0.10.6-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:cf71343646756a072b85f228d35b1d7407da1669a3de3cf47f8bbafe0c8183a4"}, + {file = "rpds_py-0.10.6-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:10f32b53f424fc75ff7b713b2edb286fdbfc94bf16317890260a81c2c00385dc"}, + {file = "rpds_py-0.10.6-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:81de24a1c51cfb32e1fbf018ab0bdbc79c04c035986526f76c33e3f9e0f3356c"}, + {file = "rpds_py-0.10.6-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac17044876e64a8ea20ab132080ddc73b895b4abe9976e263b0e30ee5be7b9c2"}, + {file = "rpds_py-0.10.6-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5e8a78bd4879bff82daef48c14d5d4057f6856149094848c3ed0ecaf49f5aec2"}, + {file = "rpds_py-0.10.6-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78ca33811e1d95cac8c2e49cb86c0fb71f4d8409d8cbea0cb495b6dbddb30a55"}, + {file = "rpds_py-0.10.6-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c63c3ef43f0b3fb00571cff6c3967cc261c0ebd14a0a134a12e83bdb8f49f21f"}, + {file = "rpds_py-0.10.6-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:7fde6d0e00b2fd0dbbb40c0eeec463ef147819f23725eda58105ba9ca48744f4"}, + {file = "rpds_py-0.10.6-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = 
"sha256:79edd779cfc46b2e15b0830eecd8b4b93f1a96649bcb502453df471a54ce7977"}, + {file = "rpds_py-0.10.6-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:9164ec8010327ab9af931d7ccd12ab8d8b5dc2f4c6a16cbdd9d087861eaaefa1"}, + {file = "rpds_py-0.10.6-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:d29ddefeab1791e3c751e0189d5f4b3dbc0bbe033b06e9c333dca1f99e1d523e"}, + {file = "rpds_py-0.10.6-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:30adb75ecd7c2a52f5e76af50644b3e0b5ba036321c390b8e7ec1bb2a16dd43c"}, + {file = "rpds_py-0.10.6-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd609fafdcdde6e67a139898196698af37438b035b25ad63704fd9097d9a3482"}, + {file = "rpds_py-0.10.6-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6eef672de005736a6efd565577101277db6057f65640a813de6c2707dc69f396"}, + {file = "rpds_py-0.10.6-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6cf4393c7b41abbf07c88eb83e8af5013606b1cdb7f6bc96b1b3536b53a574b8"}, + {file = "rpds_py-0.10.6-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ad857f42831e5b8d41a32437f88d86ead6c191455a3499c4b6d15e007936d4cf"}, + {file = "rpds_py-0.10.6-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d7360573f1e046cb3b0dceeb8864025aa78d98be4bb69f067ec1c40a9e2d9df"}, + {file = "rpds_py-0.10.6-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d08f63561c8a695afec4975fae445245386d645e3e446e6f260e81663bfd2e38"}, + {file = "rpds_py-0.10.6-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:f0f17f2ce0f3529177a5fff5525204fad7b43dd437d017dd0317f2746773443d"}, + {file = "rpds_py-0.10.6-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:442626328600bde1d09dc3bb00434f5374948838ce75c41a52152615689f9403"}, + {file = "rpds_py-0.10.6-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:e9616f5bd2595f7f4a04b67039d890348ab826e943a9bfdbe4938d0eba606971"}, + {file = "rpds_py-0.10.6.tar.gz", hash = "sha256:4ce5a708d65a8dbf3748d2474b580d606b1b9f91b5c6ab2a316e0b0cf7a4ba50"}, +] + +[[package]] +name = "ruff" +version = "0.1.1" +description = "An extremely fast Python linter, written in Rust." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "ruff-0.1.1-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:b7cdc893aef23ccc14c54bd79a8109a82a2c527e11d030b62201d86f6c2b81c5"}, + {file = "ruff-0.1.1-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:620d4b34302538dbd8bbbe8fdb8e8f98d72d29bd47e972e2b59ce6c1e8862257"}, + {file = "ruff-0.1.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a909d3930afdbc2e9fd893b0034479e90e7981791879aab50ce3d9f55205bd6"}, + {file = "ruff-0.1.1-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3305d1cb4eb8ff6d3e63a48d1659d20aab43b49fe987b3ca4900528342367145"}, + {file = "ruff-0.1.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c34ae501d0ec71acf19ee5d4d889e379863dcc4b796bf8ce2934a9357dc31db7"}, + {file = "ruff-0.1.1-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:6aa7e63c3852cf8fe62698aef31e563e97143a4b801b57f920012d0e07049a8d"}, + {file = "ruff-0.1.1-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2d68367d1379a6b47e61bc9de144a47bcdb1aad7903bbf256e4c3d31f11a87ae"}, + {file = "ruff-0.1.1-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bc11955f6ce3398d2afe81ad7e49d0ebf0a581d8bcb27b8c300281737735e3a3"}, + {file = "ruff-0.1.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cbbd8eead88ea83a250499074e2a8e9d80975f0b324b1e2e679e4594da318c25"}, + {file = "ruff-0.1.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:f4780e2bb52f3863a565ec3f699319d3493b83ff95ebbb4993e59c62aaf6e75e"}, + {file = "ruff-0.1.1-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:8f5b24daddf35b6c207619301170cae5d2699955829cda77b6ce1e5fc69340df"}, + {file = "ruff-0.1.1-py3-none-musllinux_1_2_i686.whl", hash = "sha256:d3f9ac658ba29e07b95c80fa742b059a55aefffa8b1e078bc3c08768bdd4b11a"}, + {file = "ruff-0.1.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:3521bf910104bf781e6753282282acc145cbe3eff79a1ce6b920404cd756075a"}, + {file = "ruff-0.1.1-py3-none-win32.whl", hash = "sha256:ba3208543ab91d3e4032db2652dcb6c22a25787b85b8dc3aeff084afdc612e5c"}, + {file = "ruff-0.1.1-py3-none-win_amd64.whl", hash = "sha256:3ff3006c97d9dc396b87fb46bb65818e614ad0181f059322df82bbfe6944e264"}, + {file = "ruff-0.1.1-py3-none-win_arm64.whl", hash = "sha256:e140bd717c49164c8feb4f65c644046fe929c46f42493672853e3213d7bdbce2"}, + {file = "ruff-0.1.1.tar.gz", hash = "sha256:c90461ae4abec261609e5ea436de4a4b5f2822921cf04c16d2cc9327182dbbcc"}, +] + +[[package]] +name = "setuptools" +version = "68.2.2" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-68.2.2-py3-none-any.whl", hash = "sha256:b454a35605876da60632df1a60f736524eb73cc47bbc9f3f1ef1b644de74fd2a"}, + {file = "setuptools-68.2.2.tar.gz", hash = "sha256:4ac1475276d2f1c48684874089fefcd83bd7162ddaafb81fac866ba0db282a87"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", 
"pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "snowballstemmer" +version = "2.2.0" +description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." +optional = false +python-versions = "*" +files = [ + {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, + {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, +] + +[[package]] +name = "soupsieve" +version = "2.5" +description = "A modern CSS selector implementation for Beautiful Soup." +optional = false +python-versions = ">=3.8" +files = [ + {file = "soupsieve-2.5-py3-none-any.whl", hash = "sha256:eaa337ff55a1579b6549dc679565eac1e3d000563bcb1c8ab0d0fefbc0c2cdc7"}, + {file = "soupsieve-2.5.tar.gz", hash = "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690"}, +] + +[[package]] +name = "sphinx" +version = "7.2.6" +description = "Python documentation generator" +optional = false +python-versions = ">=3.9" +files = [ + {file = "sphinx-7.2.6-py3-none-any.whl", hash = "sha256:1e09160a40b956dc623c910118fa636da93bd3ca0b9876a7b3df90f07d691560"}, + {file = "sphinx-7.2.6.tar.gz", hash = "sha256:9a5160e1ea90688d5963ba09a2dcd8bdd526620edbb65c328728f1b2228d5ab5"}, +] + +[package.dependencies] +alabaster = ">=0.7,<0.8" +babel = ">=2.9" +colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} +docutils = ">=0.18.1,<0.21" +imagesize = ">=1.3" +importlib-metadata = {version = ">=4.8", markers = "python_version < \"3.10\""} +Jinja2 = ">=3.0" +packaging = ">=21.0" +Pygments = ">=2.14" +requests = ">=2.25.0" +snowballstemmer = ">=2.0" +sphinxcontrib-applehelp = "*" +sphinxcontrib-devhelp = "*" +sphinxcontrib-htmlhelp = ">=2.0.0" +sphinxcontrib-jsmath = "*" +sphinxcontrib-qthelp = "*" +sphinxcontrib-serializinghtml = ">=1.1.9" + +[package.extras] +docs = ["sphinxcontrib-websupport"] +lint = ["docutils-stubs", "flake8 (>=3.5.0)", "flake8-simplify", "isort", "mypy (>=0.990)", "ruff", "sphinx-lint", "types-requests"] +test = ["cython (>=3.0)", "filelock", "html5lib", "pytest (>=4.6)", "setuptools (>=67.0)"] + +[[package]] +name = "sphinx-autoapi" +version = "2.1.1" +description = "Sphinx API documentation generator" +optional = false +python-versions = ">=3.7" +files = [ + {file = "sphinx-autoapi-2.1.1.tar.gz", hash = "sha256:fbadb96e79020d6b0ec45d888517bf816d6b587a2d340fbe1ec31135e300a6c8"}, + {file = "sphinx_autoapi-2.1.1-py2.py3-none-any.whl", hash = "sha256:d8da890477bd18e3327cafdead9d5a44a7d798476c6fa32492100e288250a5a3"}, +] + +[package.dependencies] 
+anyascii = "*" +astroid = ">=2.7" +Jinja2 = "*" +PyYAML = "*" +sphinx = ">=5.2.0" + +[package.extras] +docs = ["furo", "sphinx", "sphinx-design"] +dotnet = ["sphinxcontrib-dotnetdomain"] +go = ["sphinxcontrib-golangdomain"] + +[[package]] +name = "sphinx-autodoc-typehints" +version = "1.24.0" +description = "Type hints (PEP 484) support for the Sphinx autodoc extension" +optional = false +python-versions = ">=3.8" +files = [ + {file = "sphinx_autodoc_typehints-1.24.0-py3-none-any.whl", hash = "sha256:6a73c0c61a9144ce2ed5ef2bed99d615254e5005c1cc32002017d72d69fb70e6"}, + {file = "sphinx_autodoc_typehints-1.24.0.tar.gz", hash = "sha256:94e440066941bb237704bb880785e2d05e8ae5406c88674feefbb938ad0dc6af"}, +] + +[package.dependencies] +sphinx = ">=7.0.1" + +[package.extras] +docs = ["furo (>=2023.5.20)", "sphinx (>=7.0.1)"] +numpy = ["nptyping (>=2.5)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "diff-cover (>=7.5)", "pytest (>=7.3.1)", "pytest-cov (>=4.1)", "sphobjinv (>=2.3.1)", "typing-extensions (>=4.6.3)"] + +[[package]] +name = "sphinx-copybutton" +version = "0.5.2" +description = "Add a copy button to each of your code cells." +optional = false +python-versions = ">=3.7" +files = [ + {file = "sphinx-copybutton-0.5.2.tar.gz", hash = "sha256:4cf17c82fb9646d1bc9ca92ac280813a3b605d8c421225fd9913154103ee1fbd"}, + {file = "sphinx_copybutton-0.5.2-py3-none-any.whl", hash = "sha256:fb543fd386d917746c9a2c50360c7905b605726b9355cd26e9974857afeae06e"}, +] + +[package.dependencies] +sphinx = ">=1.8" + +[package.extras] +code-style = ["pre-commit (==2.12.1)"] +rtd = ["ipython", "myst-nb", "sphinx", "sphinx-book-theme", "sphinx-examples"] + +[[package]] +name = "sphinx-rtd-theme" +version = "2.0.0rc2" +description = "Read the Docs theme for Sphinx" +optional = false +python-versions = ">=3.6" +files = [ + {file = "sphinx_rtd_theme-2.0.0rc2-py2.py3-none-any.whl", hash = "sha256:f04df9213acf421c3b42f4f39005c8bc68fc4696c5b4ed4ef13d1678369713f7"}, + {file = "sphinx_rtd_theme-2.0.0rc2.tar.gz", hash = "sha256:d1270effe620df9164b1cd2d617909472a63531e21a716fd22d0fbcedf9d24ff"}, +] + +[package.dependencies] +docutils = "<0.21" +sphinx = ">=5,<8" +sphinxcontrib-jquery = ">=4,<5" + +[package.extras] +dev = ["bump2version", "sphinxcontrib-httpdomain", "transifex-client", "wheel"] + +[[package]] +name = "sphinxcontrib-apidoc" +version = "0.4.0" +description = "A Sphinx extension for running 'sphinx-apidoc' on each build" +optional = false +python-versions = ">=3.8" +files = [ + {file = "sphinxcontrib-apidoc-0.4.0.tar.gz", hash = "sha256:fe59d15882472aa93c2737afbdea6bedb34ce35cbd34aa4947909f5df1500aad"}, + {file = "sphinxcontrib_apidoc-0.4.0-py3-none-any.whl", hash = "sha256:18b9fb0cd4816758ec5f8be41c64f8924991dd40fd7b10e666ec9eed2800baff"}, +] + +[package.dependencies] +pbr = "*" +Sphinx = ">=5.0.0" + +[[package]] +name = "sphinxcontrib-applehelp" +version = "1.0.7" +description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" +optional = false +python-versions = ">=3.9" +files = [ + {file = "sphinxcontrib_applehelp-1.0.7-py3-none-any.whl", hash = "sha256:094c4d56209d1734e7d252f6e0b3ccc090bd52ee56807a5d9315b19c122ab15d"}, + {file = "sphinxcontrib_applehelp-1.0.7.tar.gz", hash = "sha256:39fdc8d762d33b01a7d8f026a3b7d71563ea3b72787d5f00ad8465bd9d6dfbfa"}, +] + +[package.dependencies] +Sphinx = ">=5" + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-devhelp" +version = "1.0.5" +description = 
"sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp documents" +optional = false +python-versions = ">=3.9" +files = [ + {file = "sphinxcontrib_devhelp-1.0.5-py3-none-any.whl", hash = "sha256:fe8009aed765188f08fcaadbb3ea0d90ce8ae2d76710b7e29ea7d047177dae2f"}, + {file = "sphinxcontrib_devhelp-1.0.5.tar.gz", hash = "sha256:63b41e0d38207ca40ebbeabcf4d8e51f76c03e78cd61abe118cf4435c73d4212"}, +] + +[package.dependencies] +Sphinx = ">=5" + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-htmlhelp" +version = "2.0.4" +description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" +optional = false +python-versions = ">=3.9" +files = [ + {file = "sphinxcontrib_htmlhelp-2.0.4-py3-none-any.whl", hash = "sha256:8001661c077a73c29beaf4a79968d0726103c5605e27db92b9ebed8bab1359e9"}, + {file = "sphinxcontrib_htmlhelp-2.0.4.tar.gz", hash = "sha256:6c26a118a05b76000738429b724a0568dbde5b72391a688577da08f11891092a"}, +] + +[package.dependencies] +Sphinx = ">=5" + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["html5lib", "pytest"] + +[[package]] +name = "sphinxcontrib-jquery" +version = "4.1" +description = "Extension to include jQuery on newer Sphinx releases" +optional = false +python-versions = ">=2.7" +files = [ + {file = "sphinxcontrib-jquery-4.1.tar.gz", hash = "sha256:1620739f04e36a2c779f1a131a2dfd49b2fd07351bf1968ced074365933abc7a"}, + {file = "sphinxcontrib_jquery-4.1-py2.py3-none-any.whl", hash = "sha256:f936030d7d0147dd026a4f2b5a57343d233f1fc7b363f68b3d4f1cb0993878ae"}, +] + +[package.dependencies] +Sphinx = ">=1.8" + +[[package]] +name = "sphinxcontrib-jsmath" +version = "1.0.1" +description = "A sphinx extension which renders display math in HTML via JavaScript" +optional = false +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, + {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, +] + +[package.extras] +test = ["flake8", "mypy", "pytest"] + +[[package]] +name = "sphinxcontrib-qthelp" +version = "1.0.6" +description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp documents" +optional = false +python-versions = ">=3.9" +files = [ + {file = "sphinxcontrib_qthelp-1.0.6-py3-none-any.whl", hash = "sha256:bf76886ee7470b934e363da7a954ea2825650013d367728588732c7350f49ea4"}, + {file = "sphinxcontrib_qthelp-1.0.6.tar.gz", hash = "sha256:62b9d1a186ab7f5ee3356d906f648cacb7a6bdb94d201ee7adf26db55092982d"}, +] + +[package.dependencies] +Sphinx = ">=5" + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-serializinghtml" +version = "1.1.9" +description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)" +optional = false +python-versions = ">=3.9" +files = [ + {file = "sphinxcontrib_serializinghtml-1.1.9-py3-none-any.whl", hash = "sha256:9b36e503703ff04f20e9675771df105e58aa029cfcbc23b8ed716019b7416ae1"}, + {file = "sphinxcontrib_serializinghtml-1.1.9.tar.gz", hash = "sha256:0c64ff898339e1fac29abd2bf5f11078f3ec413cfe9c046d3120d7ca65530b54"}, +] + +[package.dependencies] +Sphinx = ">=5" + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "sqlalchemy" +version = "1.4.49" 
+description = "Database Abstraction Library" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +files = [ + {file = "SQLAlchemy-1.4.49-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:2e126cf98b7fd38f1e33c64484406b78e937b1a280e078ef558b95bf5b6895f6"}, + {file = "SQLAlchemy-1.4.49-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:03db81b89fe7ef3857b4a00b63dedd632d6183d4ea5a31c5d8a92e000a41fc71"}, + {file = "SQLAlchemy-1.4.49-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:95b9df9afd680b7a3b13b38adf6e3a38995da5e162cc7524ef08e3be4e5ed3e1"}, + {file = "SQLAlchemy-1.4.49-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a63e43bf3f668c11bb0444ce6e809c1227b8f067ca1068898f3008a273f52b09"}, + {file = "SQLAlchemy-1.4.49-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca46de16650d143a928d10842939dab208e8d8c3a9a8757600cae9b7c579c5cd"}, + {file = "SQLAlchemy-1.4.49-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f835c050ebaa4e48b18403bed2c0fda986525896efd76c245bdd4db995e51a4c"}, + {file = "SQLAlchemy-1.4.49-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c21b172dfb22e0db303ff6419451f0cac891d2e911bb9fbf8003d717f1bcf91"}, + {file = "SQLAlchemy-1.4.49-cp310-cp310-win32.whl", hash = "sha256:5fb1ebdfc8373b5a291485757bd6431de8d7ed42c27439f543c81f6c8febd729"}, + {file = "SQLAlchemy-1.4.49-cp310-cp310-win_amd64.whl", hash = "sha256:f8a65990c9c490f4651b5c02abccc9f113a7f56fa482031ac8cb88b70bc8ccaa"}, + {file = "SQLAlchemy-1.4.49-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8923dfdf24d5aa8a3adb59723f54118dd4fe62cf59ed0d0d65d940579c1170a4"}, + {file = "SQLAlchemy-1.4.49-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9ab2c507a7a439f13ca4499db6d3f50423d1d65dc9b5ed897e70941d9e135b0"}, + {file = "SQLAlchemy-1.4.49-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5debe7d49b8acf1f3035317e63d9ec8d5e4d904c6e75a2a9246a119f5f2fdf3d"}, + {file = "SQLAlchemy-1.4.49-cp311-cp311-win32.whl", hash = "sha256:82b08e82da3756765c2e75f327b9bf6b0f043c9c3925fb95fb51e1567fa4ee87"}, + {file = "SQLAlchemy-1.4.49-cp311-cp311-win_amd64.whl", hash = "sha256:171e04eeb5d1c0d96a544caf982621a1711d078dbc5c96f11d6469169bd003f1"}, + {file = "SQLAlchemy-1.4.49-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f23755c384c2969ca2f7667a83f7c5648fcf8b62a3f2bbd883d805454964a800"}, + {file = "SQLAlchemy-1.4.49-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8396e896e08e37032e87e7fbf4a15f431aa878c286dc7f79e616c2feacdb366c"}, + {file = "SQLAlchemy-1.4.49-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66da9627cfcc43bbdebd47bfe0145bb662041472393c03b7802253993b6b7c90"}, + {file = "SQLAlchemy-1.4.49-cp312-cp312-win32.whl", hash = "sha256:9a06e046ffeb8a484279e54bda0a5abfd9675f594a2e38ef3133d7e4d75b6214"}, + {file = "SQLAlchemy-1.4.49-cp312-cp312-win_amd64.whl", hash = "sha256:7cf8b90ad84ad3a45098b1c9f56f2b161601e4670827d6b892ea0e884569bd1d"}, + {file = "SQLAlchemy-1.4.49-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:36e58f8c4fe43984384e3fbe6341ac99b6b4e083de2fe838f0fdb91cebe9e9cb"}, + {file 
= "SQLAlchemy-1.4.49-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b31e67ff419013f99ad6f8fc73ee19ea31585e1e9fe773744c0f3ce58c039c30"}, + {file = "SQLAlchemy-1.4.49-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ebc22807a7e161c0d8f3da34018ab7c97ef6223578fcdd99b1d3e7ed1100a5db"}, + {file = "SQLAlchemy-1.4.49-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c14b29d9e1529f99efd550cd04dbb6db6ba5d690abb96d52de2bff4ed518bc95"}, + {file = "SQLAlchemy-1.4.49-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c40f3470e084d31247aea228aa1c39bbc0904c2b9ccbf5d3cfa2ea2dac06f26d"}, + {file = "SQLAlchemy-1.4.49-cp36-cp36m-win32.whl", hash = "sha256:706bfa02157b97c136547c406f263e4c6274a7b061b3eb9742915dd774bbc264"}, + {file = "SQLAlchemy-1.4.49-cp36-cp36m-win_amd64.whl", hash = "sha256:a7f7b5c07ae5c0cfd24c2db86071fb2a3d947da7bd487e359cc91e67ac1c6d2e"}, + {file = "SQLAlchemy-1.4.49-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:4afbbf5ef41ac18e02c8dc1f86c04b22b7a2125f2a030e25bbb4aff31abb224b"}, + {file = "SQLAlchemy-1.4.49-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:24e300c0c2147484a002b175f4e1361f102e82c345bf263242f0449672a4bccf"}, + {file = "SQLAlchemy-1.4.49-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:393cd06c3b00b57f5421e2133e088df9cabcececcea180327e43b937b5a7caa5"}, + {file = "SQLAlchemy-1.4.49-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:201de072b818f8ad55c80d18d1a788729cccf9be6d9dc3b9d8613b053cd4836d"}, + {file = "SQLAlchemy-1.4.49-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7653ed6817c710d0c95558232aba799307d14ae084cc9b1f4c389157ec50df5c"}, + {file = "SQLAlchemy-1.4.49-cp37-cp37m-win32.whl", hash = "sha256:647e0b309cb4512b1f1b78471fdaf72921b6fa6e750b9f891e09c6e2f0e5326f"}, + {file = "SQLAlchemy-1.4.49-cp37-cp37m-win_amd64.whl", hash = "sha256:ab73ed1a05ff539afc4a7f8cf371764cdf79768ecb7d2ec691e3ff89abbc541e"}, + {file = "SQLAlchemy-1.4.49-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:37ce517c011560d68f1ffb28af65d7e06f873f191eb3a73af5671e9c3fada08a"}, + {file = "SQLAlchemy-1.4.49-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1878ce508edea4a879015ab5215546c444233881301e97ca16fe251e89f1c55"}, + {file = "SQLAlchemy-1.4.49-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95ab792ca493891d7a45a077e35b418f68435efb3e1706cb8155e20e86a9013c"}, + {file = "SQLAlchemy-1.4.49-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0e8e608983e6f85d0852ca61f97e521b62e67969e6e640fe6c6b575d4db68557"}, + {file = "SQLAlchemy-1.4.49-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ccf956da45290df6e809ea12c54c02ace7f8ff4d765d6d3dfb3655ee876ce58d"}, + {file = "SQLAlchemy-1.4.49-cp38-cp38-win32.whl", hash = "sha256:f167c8175ab908ce48bd6550679cc6ea20ae169379e73c7720a28f89e53aa532"}, + {file = 
"SQLAlchemy-1.4.49-cp38-cp38-win_amd64.whl", hash = "sha256:45806315aae81a0c202752558f0df52b42d11dd7ba0097bf71e253b4215f34f4"}, + {file = "SQLAlchemy-1.4.49-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:b6d0c4b15d65087738a6e22e0ff461b407533ff65a73b818089efc8eb2b3e1de"}, + {file = "SQLAlchemy-1.4.49-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a843e34abfd4c797018fd8d00ffffa99fd5184c421f190b6ca99def4087689bd"}, + {file = "SQLAlchemy-1.4.49-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:738d7321212941ab19ba2acf02a68b8ee64987b248ffa2101630e8fccb549e0d"}, + {file = "SQLAlchemy-1.4.49-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1c890421651b45a681181301b3497e4d57c0d01dc001e10438a40e9a9c25ee77"}, + {file = "SQLAlchemy-1.4.49-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d26f280b8f0a8f497bc10573849ad6dc62e671d2468826e5c748d04ed9e670d5"}, + {file = "SQLAlchemy-1.4.49-cp39-cp39-win32.whl", hash = "sha256:ec2268de67f73b43320383947e74700e95c6770d0c68c4e615e9897e46296294"}, + {file = "SQLAlchemy-1.4.49-cp39-cp39-win_amd64.whl", hash = "sha256:bbdf16372859b8ed3f4d05f925a984771cd2abd18bd187042f24be4886c2a15f"}, + {file = "SQLAlchemy-1.4.49.tar.gz", hash = "sha256:06ff25cbae30c396c4b7737464f2a7fc37a67b7da409993b182b024cec80aed9"}, +] + +[package.dependencies] +greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\")"} + +[package.extras] +aiomysql = ["aiomysql", "greenlet (!=0.4.17)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing-extensions (!=3.10.0.1)"] +asyncio = ["greenlet (!=0.4.17)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4)", "greenlet (!=0.4.17)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)"] +mssql = ["pyodbc"] +mssql-pymssql = ["pymssql"] +mssql-pyodbc = ["pyodbc"] +mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"] +mysql = ["mysqlclient (>=1.4.0)", "mysqlclient (>=1.4.0,<2)"] +mysql-connector = ["mysql-connector-python"] +oracle = ["cx-oracle (>=7)", "cx-oracle (>=7,<8)"] +postgresql = ["psycopg2 (>=2.7)"] +postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] +postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"] +postgresql-psycopg2binary = ["psycopg2-binary"] +postgresql-psycopg2cffi = ["psycopg2cffi"] +pymysql = ["pymysql", "pymysql (<1)"] +sqlcipher = ["sqlcipher3-binary"] + +[[package]] +name = "stack-data" +version = "0.6.3" +description = "Extract data from python stack frames and tracebacks for informative displays" +optional = false +python-versions = "*" +files = [ + {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"}, + {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"}, +] + +[package.dependencies] +asttokens = ">=2.1.0" +executing = ">=1.2.0" +pure-eval = "*" + +[package.extras] +tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] + +[[package]] +name = "tabulate" +version = "0.9.0" +description = "Pretty-print tabular data" +optional = false +python-versions = ">=3.7" +files = [ + {file = 
"tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"}, + {file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"}, +] + +[package.extras] +widechars = ["wcwidth"] + +[[package]] +name = "tinycss2" +version = "1.2.1" +description = "A tiny CSS parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tinycss2-1.2.1-py3-none-any.whl", hash = "sha256:2b80a96d41e7c3914b8cda8bc7f705a4d9c49275616e886103dd839dfc847847"}, + {file = "tinycss2-1.2.1.tar.gz", hash = "sha256:8cff3a8f066c2ec677c06dbc7b45619804a6938478d9d73c284b29d14ecb0627"}, +] + +[package.dependencies] +webencodings = ">=0.4" + +[package.extras] +doc = ["sphinx", "sphinx_rtd_theme"] +test = ["flake8", "isort", "pytest"] + +[[package]] +name = "tokenize-rt" +version = "5.2.0" +description = "A wrapper around the stdlib `tokenize` which roundtrips." +optional = false +python-versions = ">=3.8" +files = [ + {file = "tokenize_rt-5.2.0-py2.py3-none-any.whl", hash = "sha256:b79d41a65cfec71285433511b50271b05da3584a1da144a0752e9c621a285289"}, + {file = "tokenize_rt-5.2.0.tar.gz", hash = "sha256:9fe80f8a5c1edad2d3ede0f37481cc0cc1538a2f442c9c2f9e4feacd2792d054"}, +] + +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "tomlkit" +version = "0.12.1" +description = "Style preserving TOML library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomlkit-0.12.1-py3-none-any.whl", hash = "sha256:712cbd236609acc6a3e2e97253dfc52d4c2082982a88f61b640ecf0817eab899"}, + {file = "tomlkit-0.12.1.tar.gz", hash = "sha256:38e1ff8edb991273ec9f6181244a6a391ac30e9f5098e7535640ea6be97a7c86"}, +] + +[[package]] +name = "toolz" +version = "0.12.0" +description = "List processing tools and functional utilities" +optional = false +python-versions = ">=3.5" +files = [ + {file = "toolz-0.12.0-py3-none-any.whl", hash = "sha256:2059bd4148deb1884bb0eb770a3cde70e7f954cfbbdc2285f1f2de01fd21eb6f"}, + {file = "toolz-0.12.0.tar.gz", hash = "sha256:88c570861c440ee3f2f6037c4654613228ff40c93a6c25e0eba70d17282c6194"}, +] + +[[package]] +name = "tornado" +version = "6.3.3" +description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." 
+optional = false +python-versions = ">= 3.8" +files = [ + {file = "tornado-6.3.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:502fba735c84450974fec147340016ad928d29f1e91f49be168c0a4c18181e1d"}, + {file = "tornado-6.3.3-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:805d507b1f588320c26f7f097108eb4023bbaa984d63176d1652e184ba24270a"}, + {file = "tornado-6.3.3-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bd19ca6c16882e4d37368e0152f99c099bad93e0950ce55e71daed74045908f"}, + {file = "tornado-6.3.3-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ac51f42808cca9b3613f51ffe2a965c8525cb1b00b7b2d56828b8045354f76a"}, + {file = "tornado-6.3.3-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:71a8db65160a3c55d61839b7302a9a400074c9c753040455494e2af74e2501f2"}, + {file = "tornado-6.3.3-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:ceb917a50cd35882b57600709dd5421a418c29ddc852da8bcdab1f0db33406b0"}, + {file = "tornado-6.3.3-cp38-abi3-musllinux_1_1_i686.whl", hash = "sha256:7d01abc57ea0dbb51ddfed477dfe22719d376119844e33c661d873bf9c0e4a16"}, + {file = "tornado-6.3.3-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:9dc4444c0defcd3929d5c1eb5706cbe1b116e762ff3e0deca8b715d14bf6ec17"}, + {file = "tornado-6.3.3-cp38-abi3-win32.whl", hash = "sha256:65ceca9500383fbdf33a98c0087cb975b2ef3bfb874cb35b8de8740cf7f41bd3"}, + {file = "tornado-6.3.3-cp38-abi3-win_amd64.whl", hash = "sha256:22d3c2fa10b5793da13c807e6fc38ff49a4f6e1e3868b0a6f4164768bb8e20f5"}, + {file = "tornado-6.3.3.tar.gz", hash = "sha256:e7d8db41c0181c80d76c982aacc442c0783a2c54d6400fe028954201a2e032fe"}, +] + +[[package]] +name = "traitlets" +version = "5.11.2" +description = "Traitlets Python configuration system" +optional = false +python-versions = ">=3.8" +files = [ + {file = "traitlets-5.11.2-py3-none-any.whl", hash = "sha256:98277f247f18b2c5cabaf4af369187754f4fb0e85911d473f72329db8a7f4fae"}, + {file = "traitlets-5.11.2.tar.gz", hash = "sha256:7564b5bf8d38c40fa45498072bf4dc5e8346eb087bbf1e2ae2d8774f6a0f078e"}, +] + +[package.extras] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] +test = ["argcomplete (>=3.0.3)", "mypy (>=1.5.1)", "pre-commit", "pytest (>=7.0,<7.5)", "pytest-mock", "pytest-mypy-testing"] + +[[package]] +name = "typing-extensions" +version = "4.8.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"}, + {file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"}, +] + +[[package]] +name = "urllib3" +version = "2.0.7" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "urllib3-2.0.7-py3-none-any.whl", hash = "sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e"}, + {file = "urllib3-2.0.7.tar.gz", hash = "sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "virtualenv" +version = "20.24.5" +description = "Virtual Python Environment builder" +optional = false +python-versions = ">=3.7" +files = [ + {file = "virtualenv-20.24.5-py3-none-any.whl", hash = "sha256:b80039f280f4919c77b30f1c23294ae357c4c8701042086e3fc005963e4e537b"}, + {file = "virtualenv-20.24.5.tar.gz", hash = "sha256:e8361967f6da6fbdf1426483bfe9fca8287c242ac0bc30429905721cefbff752"}, +] + +[package.dependencies] +distlib = ">=0.3.7,<1" +filelock = ">=3.12.2,<4" +platformdirs = ">=3.9.1,<4" + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] + +[[package]] +name = "wcwidth" +version = "0.2.8" +description = "Measures the displayed width of unicode strings in a terminal" +optional = false +python-versions = "*" +files = [ + {file = "wcwidth-0.2.8-py2.py3-none-any.whl", hash = "sha256:77f719e01648ed600dfa5402c347481c0992263b81a027344f3e1ba25493a704"}, + {file = "wcwidth-0.2.8.tar.gz", hash = "sha256:8705c569999ffbb4f6a87c6d1b80f324bd6db952f5eb0b95bc07517f4c1813d4"}, +] + +[[package]] +name = "webencodings" +version = "0.5.1" +description = "Character encoding aliases for legacy web content" +optional = false +python-versions = "*" +files = [ + {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, + {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, +] + +[[package]] +name = "zipp" +version = "3.17.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "zipp-3.17.0-py3-none-any.whl", hash = "sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31"}, + {file = "zipp-3.17.0.tar.gz", hash = "sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] + +[metadata] +lock-version = "2.0" +python-versions = ">=3.9, <3.11" +content-hash = "4fd0a8b552849e25d13b9ba2e08934586c19f256a2e063dc8d14678605341273" diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 
0000000..443984e --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,129 @@ +[tool.poetry] +name = "cycquery" +version = "0.1.0" +description = "A tool to query EHR databases" +authors = ["Vector AI Engineering "] +license = "Apache-2.0" +repository = "https://github.com/VectorInstitute/cyclops-query" +documentation = "https://vectorinstitute.github.io/cyclops-query/" +packages = [ + { include = "cycquery" }, +] +readme = "README.md" + +[tool.poetry.dependencies] +python = ">=3.9, <3.11" +pandas = "^1.4.1" +pyarrow = "^11.0.0" +dask = { version = "^2023.10.0", extras = ["dataframe"] } +psycopg2 = "^2.9.6" +SQLAlchemy = "^1.4.32, <2.0" + +[tool.poetry.group.codestyle.dependencies] +pytest = "^7.1.1" +pre-commit = "^2.17.0" +black = "^22.1.0" +pytest-cov = "^3.0.0" +codecov = "^2.1.13" +nbstripout = "^0.6.1" +mypy = "^1.0.0" +ruff = "^0.1.0" +nbqa = { version = "^1.7.0", extras = ["toolchain"] } + +[tool.poetry.group.docs.dependencies] +numpydoc = "^1.2" +sphinx = "^7.2.5" +sphinx-rtd-theme = "^2.0.0rc2" +sphinxcontrib-apidoc = "^0.4.0" +sphinx-autodoc-typehints = "^1.24.0" +myst-parser = "^2.0.0" +sphinx-copybutton = "^0.5.0" +sphinx-autoapi = "^2.0.0" +nbsphinx = "^0.8.11" +ipython = "^8.8.0" +ipykernel = "^6.23.0" + +[tool.mypy] +ignore_missing_imports = true +install_types = true +pretty = true +namespace_packages = true +explicit_package_bases = true +non_interactive = true +warn_unused_configs = true +allow_any_generics = false +allow_subclassing_any = false +allow_untyped_calls = false +allow_untyped_defs = false +allow_incomplete_defs = false +check_untyped_defs = true +allow_untyped_decorators = false +warn_redundant_casts = true +warn_unused_ignores = true +warn_return_any = true +implicit_reexport = false +strict_equality = true +extra_checks = true +plugins = ["sqlalchemy.ext.mypy.plugin"] + +[tool.ruff] +include = ["*.py", "pyproject.toml", "*.ipynb"] +select = [ + "A", # flake8-builtins + "B", # flake8-bugbear + "COM", # flake8-commas + "C4", # flake8-comprehensions + "RET", # flake8-return + "SIM", # flake8-simplify + "ICN", # flake8-import-conventions + "Q", # flake8-quotes + "RSE", # flake8-raise + "D", # pydocstyle + "E", # pycodestyle + "F", # pyflakes + "I", # isort + "W", # pycodestyle + "N", # pep8-naming + "ERA", # eradicate + "PL", # pylint +] +fixable = ["A", "B", "COM", "C4", "RET", "SIM", "ICN", "Q", "RSE", "D", "E", "F", "I", "W", "N", "ERA", "PL"] +line-length = 88 +ignore = [ + "B905", # `zip()` without an explicit `strict=` parameter + "E501", # line too long + "D203", # 1 blank line required before class docstring + "D213", # Multi-line docstring summary should start at the second line + "PLR2004", # Replace magic number with named constant + "PLR0913", # Too many arguments +] + +# Ignore import violations in all `__init__.py` files. 
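+# (E402: module import not at top of file; F401: unused import; F403: star import used; F811: redefinition of unused name.)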
+[tool.ruff.per-file-ignores] +"__init__.py" = ["E402", "F401", "F403", "F811"] + +[tool.ruff.pep8-naming] +ignore-names = ["X*", "setUp"] + +[tool.ruff.isort] +lines-after-imports = 2 + +[tool.ruff.pydocstyle] +convention = "numpy" + +[tool.ruff.pycodestyle] +max-doc-length = 88 + +[tool.pytest.ini_options] +markers = [ + "integration_test: marks tests as integration tests", +] + +[tool.coverage] + [tool.coverage.run] + source=["cycquery"] + omit=["tests/*", "*__init__.py"] + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" diff --git a/setup.py b/setup.py new file mode 100644 index 0000000..183e0ed --- /dev/null +++ b/setup.py @@ -0,0 +1,6 @@ +"""Setup package.""" + +from setuptools import setup + + +setup() diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..d420712 --- /dev/null +++ b/tests/__init__.py @@ -0,0 +1 @@ +"""Tests.""" diff --git a/tests/cycquery/__init__.py b/tests/cycquery/__init__.py new file mode 100644 index 0000000..9783b8c --- /dev/null +++ b/tests/cycquery/__init__.py @@ -0,0 +1 @@ +"""Tests for query API package.""" diff --git a/tests/cycquery/post_process/__init__.py b/tests/cycquery/post_process/__init__.py new file mode 100644 index 0000000..27484c0 --- /dev/null +++ b/tests/cycquery/post_process/__init__.py @@ -0,0 +1 @@ +"""Tests for query postprocess functions.""" diff --git a/tests/cycquery/post_process/test_util.py b/tests/cycquery/post_process/test_util.py new file mode 100644 index 0000000..c993e1d --- /dev/null +++ b/tests/cycquery/post_process/test_util.py @@ -0,0 +1,51 @@ +"""Tests for post-processing functions in the query package.""" + +import pandas as pd + +from cycquery.post_process.util import event_time_between + + +def test_event_time_between(): + """Test event_time_between fn.""" + admit_ts = pd.Series( + [ + pd.Timestamp(year=2017, month=1, day=1, hour=12), + pd.Timestamp(year=2017, month=1, day=1, hour=12), + ], + ) + discharge_ts = pd.Series( + [ + pd.Timestamp(year=2017, month=1, day=7, hour=12), + pd.Timestamp(year=2018, month=1, day=9, hour=12), + ], + ) + is_between = event_time_between( + pd.Timestamp(year=2017, month=1, day=2), + admit_ts, + discharge_ts, + ) + assert is_between[0] + assert is_between[1] + is_between = event_time_between( + pd.Timestamp(year=2017, month=1, day=7, hour=12), + admit_ts, + discharge_ts, + ) + assert not is_between[0] + assert is_between[1] + is_between = event_time_between( + pd.Timestamp(year=2017, month=1, day=7, hour=12), + admit_ts, + discharge_ts, + discharge_inclusive=True, + ) + assert is_between[0] + assert is_between[1] + is_between = event_time_between( + pd.Timestamp(year=2017, month=1, day=1, hour=12), + admit_ts, + discharge_ts, + admit_inclusive=False, + ) + assert not is_between[0] + assert not is_between[1] diff --git a/tests/cycquery/test_base.py b/tests/cycquery/test_base.py new file mode 100644 index 0000000..45d548f --- /dev/null +++ b/tests/cycquery/test_base.py @@ -0,0 +1,22 @@ +"""Test base dataset querier, using OMOPQuerier as an example.""" + +import pytest + +from cycquery import OMOPQuerier + + +@pytest.mark.integration_test() +def test_dataset_querier(): + """Test base querier methods using OMOPQuerier.""" + querier = OMOPQuerier( + database="synthea_integration_test", + schema_name="cdm_synthea10", + user="postgres", + password="pwd", + ) + assert len(querier.list_tables()) == 69 + assert len(querier.list_schemas()) == 4 + assert len(querier.list_tables(schema_name="cdm_synthea10")) == 43 + 
visit_occurrence_columns = querier.list_columns("cdm_synthea10", "visit_occurrence") + assert len(visit_occurrence_columns) == 17 + assert "visit_occurrence_id" in visit_occurrence_columns diff --git a/tests/cycquery/test_eicu.py b/tests/cycquery/test_eicu.py new file mode 100644 index 0000000..ab6bc56 --- /dev/null +++ b/tests/cycquery/test_eicu.py @@ -0,0 +1,29 @@ +"""Test eICU query API.""" + +import pytest + +from cycquery import EICUQuerier + + +@pytest.mark.integration_test() +def test_eicu_querier(): + """Test EICUQuerier on eICU-CRD.""" + querier = EICUQuerier( + database="eicu", + user="postgres", + password="pwd", + ) + + patients = querier.eicu_crd.patient().run(limit=10) + assert len(patients) == 10 + assert "age" in patients + + diagnoses = querier.eicu_crd.diagnosis().run(limit=10) + assert len(diagnoses) == 10 + assert "diagnosisstring" in diagnoses + + vital_periods = querier.eicu_crd.vitalperiodic().run(limit=10) + assert "heartrate" in vital_periods + + vital_aperiodic = querier.eicu_crd.vitalaperiodic().run(limit=10) + assert "pvri" in vital_aperiodic diff --git a/tests/cycquery/test_interface.py b/tests/cycquery/test_interface.py new file mode 100644 index 0000000..4385edd --- /dev/null +++ b/tests/cycquery/test_interface.py @@ -0,0 +1,81 @@ +"""Test functions for interface module in query package.""" + +import os +import shutil +from unittest.mock import patch + +import dask.dataframe as dd +import pandas as pd +import pytest + +from cycquery.interface import QueryInterface +from cycquery.omop import OMOPQuerier + + +@pytest.fixture() +def test_data(): + """Dummy dataframe for testing.""" + return pd.DataFrame([[1, "a", 1], [5.1, "b", 0]], columns=["col1", "col2", "col3"]) + + +@patch("cycquery.orm.Database") +@patch("sqlalchemy.sql.selectable.Subquery") +def test_query_interface( + database, + query, + test_data, +): + """Test QueryInterface.""" + query_interface = QueryInterface(database, query) + query_interface.run() + + query_interface._data = test_data + path = os.path.join("test_save", "test_features.parquet") + query_interface.save(path) + loaded_data = pd.read_parquet(path) + assert loaded_data.equals(test_data) + shutil.rmtree("test_save") + query_interface.clear_data() + assert not query_interface.data + + with pytest.raises(ValueError): + query_interface.save(path, file_format="donkey") + + +@pytest.mark.integration_test() +def test_query_interface_integration(): + """Test QueryInterface with OMOPQuerier.""" + synthea = OMOPQuerier( + database="synthea_integration_test", + schema_name="cdm_synthea10", + user="postgres", + password="pwd", + ) + visits = synthea.visit_occurrence() + assert isinstance(visits, QueryInterface) + visits_pd_df = visits.run() + assert isinstance(visits_pd_df, pd.DataFrame) + assert visits_pd_df.shape[0] > 0 + visits_dd_df = visits.run(backend="dask", index_col="visit_occurrence_id") + assert isinstance(visits_dd_df, dd.DataFrame) + assert ( + "visit_occurrence_id" in visits_dd_df.columns + ) # reset index and keep index column + assert visits_dd_df.shape[0].compute() > 0 + visits_dd_df = visits.run( + backend="dask", + index_col="visit_occurrence_id", + n_partitions=2, + ) + assert isinstance(visits_dd_df, dd.DataFrame) + assert visits_dd_df.npartitions == 2 + visit_ids_0 = visits_dd_df.partitions[0].compute()["visit_occurrence_id"] + visit_ids_1 = visits_dd_df.partitions[1].compute()["visit_occurrence_id"] + # check that the partitions don't overlap + assert len(set(visit_ids_0).intersection(set(visit_ids_1))) == 0 + + # test
running a query using SQL string + synthea_db = visits.database + visits_df = synthea_db.run_query("SELECT * FROM cdm_synthea10.visit_occurrence") + assert isinstance(visits_df, pd.DataFrame) + assert visits_df.shape[0] > 0 diff --git a/tests/cycquery/test_mimiciii.py b/tests/cycquery/test_mimiciii.py new file mode 100644 index 0000000..84e7e5d --- /dev/null +++ b/tests/cycquery/test_mimiciii.py @@ -0,0 +1,34 @@ +"""Test MIMICIII query API.""" + +import pytest + +from cycquery import MIMICIIIQuerier + + +@pytest.mark.integration_test() +def test_mimiciii_querier(): + """Test MIMICIIIQuerier.""" + querier = MIMICIIIQuerier( + dbms="postgresql", + port=5432, + host="localhost", + database="mimiciii", + user="postgres", + password="pwd", + ) + custom_tables = querier.list_custom_tables() + assert "diagnoses" in custom_tables + assert "labevents" in custom_tables + assert "chartevents" in custom_tables + + diagnoses = querier.diagnoses().run(limit=10) + assert len(diagnoses) == 10 + assert "long_title" in diagnoses + + labevents = querier.labevents().run(limit=10) + assert len(labevents) == 10 + assert "itemid" in labevents + + chartevents = querier.chartevents().run(limit=10) + assert len(chartevents) == 10 + assert "itemid" in chartevents diff --git a/tests/cycquery/test_mimiciv.py b/tests/cycquery/test_mimiciv.py new file mode 100644 index 0000000..89cade2 --- /dev/null +++ b/tests/cycquery/test_mimiciv.py @@ -0,0 +1,39 @@ +"""Test MIMICIV-2.0 query API.""" + +import pytest + +from cycquery import MIMICIVQuerier + + +@pytest.mark.integration_test() +def test_mimiciv_querier(): + """Test MIMICQuerier on MIMICIV-2.0.""" + querier = MIMICIVQuerier( + database="mimiciv-2.0", + user="postgres", + password="pwd", + ) + + patients = querier.patients().run(limit=10) + assert len(patients) == 10 + assert "anchor_year_difference" in patients + + diagnoses = querier.diagnoses().run(limit=10) + assert len(diagnoses) == 10 + assert "long_title" in diagnoses + + lab_events = querier.labevents().run(limit=10) + assert "category" in lab_events + + chart_events = querier.chartevents().run(limit=10) + assert "value" in chart_events + assert "category" in chart_events + + custom_tables = querier.list_custom_tables() + assert "patients" in custom_tables + assert "diagnoses" in custom_tables + assert "labevents" in custom_tables + assert "chartevents" in custom_tables + + with pytest.raises(AttributeError): + querier.get_table("invalid_schema", "invalid_table") diff --git a/tests/cycquery/test_omop.py b/tests/cycquery/test_omop.py new file mode 100644 index 0000000..92e13db --- /dev/null +++ b/tests/cycquery/test_omop.py @@ -0,0 +1,51 @@ +"""Test OMOP query API.""" + +import pytest + +import cycquery.ops as qo +from cycquery import OMOPQuerier + + +@pytest.mark.integration_test() +def test_omop_querier_synthea(): + """Test OMOPQuerier on synthea data.""" + querier = OMOPQuerier( + database="synthea_integration_test", + schema_name="cdm_synthea10", + user="postgres", + password="pwd", + ) + ops = qo.Sequential( + qo.ConditionEquals("gender_source_value", "M"), + qo.Rename({"race_source_value": "race"}), + ) + persons = querier.person() + persons = persons.ops(ops) + visits = querier.visit_occurrence() + visits = visits.join(persons, "person_id").run() + persons = persons.run() + observations = querier.observation().run() + measurements = querier.measurement().run() + visit_details = querier.visit_detail().run() + providers = querier.cdm_synthea10.provider().run() + conditions = 
querier.cdm_synthea10.condition_occurrence().run() + assert len(persons) == 54 + assert len(visits) == 1798 + assert len(visit_details) == 4320 + assert len(observations) == 17202 + assert len(measurements) == 19994 + assert len(providers) == 212 + assert len(conditions) == 1419 + + +@pytest.mark.integration_test() +def test_omop_querier_mimiciii(): + """Test OMOPQuerier on MIMICIII data.""" + querier = OMOPQuerier( + database="mimiciii", + schema_name="omop", + user="postgres", + password="pwd", + ) + visits = querier.visit_occurrence().run() + assert len(visits) == 58976 diff --git a/tests/cycquery/test_ops.py b/tests/cycquery/test_ops.py new file mode 100644 index 0000000..958875e --- /dev/null +++ b/tests/cycquery/test_ops.py @@ -0,0 +1,655 @@ +"""Test low-level query API processing functions.""" + +from math import isclose + +import pandas as pd +import pytest +from sqlalchemy import column, select + +from cycquery.omop import OMOPQuerier +from cycquery.ops import ( + AddColumn, + AddNumeric, + And, + Apply, + Cast, + ConditionAfterDate, + ConditionBeforeDate, + ConditionEndsWith, + ConditionEquals, + ConditionGreaterThan, + ConditionIn, + ConditionInMonths, + ConditionInYears, + ConditionLessThan, + ConditionLike, + ConditionRegexMatch, + ConditionStartsWith, + ConditionSubstring, + Distinct, + Drop, + DropEmpty, + DropNulls, + ExtractTimestampComponent, + FillNull, + GroupByAggregate, + Limit, + Literal, + Or, + OrderBy, + QueryOp, + Rename, + ReorderAfter, + Sequential, + Substring, + Trim, + _addindent, + _none_add, + _process_checks, +) +from cycquery.util import process_column + + +QUERIER = OMOPQuerier( + database="synthea_integration_test", + user="postgres", + password="pwd", + schema_name="cdm_synthea10", +) + + +@pytest.fixture() +def table_input(): + """Test table input.""" + column_a = process_column(column("a"), to_timestamp=True) + return select(column_a, column("b"), column("c")) + + +@pytest.fixture() +def visits_table(): + """Test visits table input.""" + return QUERIER.visit_occurrence() + + +@pytest.fixture() +def measurements_table(): + """Test measurement table input.""" + return QUERIER.measurement() + + +def test__none_add(): + """Test _none_add fn.""" + assert _none_add("1", "2") == "12" + assert _none_add("1", None) == "1" + assert _none_add(None, "2") == "2" + + +def test__process_checks(table_input): + """Test _process_checks fn.""" + _process_checks(table_input, cols=["a"], cols_not_in=["d"], timestamp_cols=["a"]) + with pytest.raises(ValueError): + _process_checks(table_input, cols_not_in=["a"]) + + +class TestAddindent: + """Test _addindent fn.""" + + def test_addindent_multiple_lines(self): + """Test _addindent fn with multiple lines.""" + input_string = "This is a\nmultiline\nstring" + expected_output = "This is a\n multiline\n string" + assert _addindent(input_string, 4) == expected_output + + def test_addindent_single_line(self): + """Test _addindent fn with single line.""" + input_string = "This is a single line string" + assert _addindent(input_string, 4) == input_string + + +class TestQueryOp: + """Test QueryOp class.""" + + def test_add_child_operation(self): + """Test adding a child operation.""" + query_op = QueryOp() + child_op = QueryOp() + query_op._add_op("child", child_op) + assert query_op.child == child_op + + def test_get_query_op_name(self): + """Test getting the name of the query op.""" + query_op = QueryOp() + assert query_op._get_name() == "QueryOp" + + def test_set_attribute(self): + """Test setting an attribute of the query op."""
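+ # Direct attribute assignment is expected to register the child op, mirroring _add_op above.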
+ query_op = QueryOp() + child_op = QueryOp() + query_op.child = child_op + assert query_op.child == child_op + + def test_string_representation(self): + """Test string representation of the query op.""" + query_op = QueryOp() + child_op = QueryOp() + query_op._add_op("child", child_op) + assert repr(query_op) == "QueryOp(\n (child): QueryOp()\n)" + + def test_add_child_operation_empty_name(self): + """Test adding a child operation with an empty name.""" + query_op = QueryOp() + child_op = QueryOp() + with pytest.raises(KeyError): + query_op._add_op("", child_op) + + def test_add_child_operation_dot_name(self): + """Test adding a child operation with a dot in the name.""" + query_op = QueryOp() + child_op = QueryOp() + with pytest.raises(KeyError): + query_op._add_op("child.name", child_op) + + +@pytest.mark.integration_test() +def test_drop(visits_table): + """Test Drop.""" + visits = visits_table.ops(Drop("care_site_source_value")).run() + assert "care_site_source_value" not in visits.columns + + +@pytest.mark.integration_test() +def test_fill_null(visits_table): + """Test FillNull.""" + visits_before = visits_table.run() + unique_before = visits_before["preceding_visit_occurrence_id"].unique() + visits_after = visits_table.ops( + FillNull(["preceding_visit_occurrence_id", "care_site_id"], 0), + ).run() + unique_after = visits_after["preceding_visit_occurrence_id"].unique() + assert visits_after["preceding_visit_occurrence_id"].isna().sum() == 0 + assert visits_after["care_site_id"].isna().sum() == 0 + assert 0 not in unique_before + assert len(unique_after) == len(unique_before) + assert len(visits_after["care_site_id"].unique()) == 1 + + visits_after = visits_table.ops( + FillNull( + ["preceding_visit_occurrence_id", "care_site_id"], + [0, -99], + ["col1", "col2"], + ), + ).run() + assert visits_after["preceding_visit_occurrence_id"].isna().sum() != 0 + assert visits_after["care_site_id"].isna().sum() != 0 + assert visits_after["col1"].isna().sum() == 0 + assert visits_after["col2"].isna().sum() == 0 + assert len(visits_after["col2"].unique()) == 1 + assert -99 in visits_after["col2"].unique() + + +@pytest.mark.integration_test() +def test_add_column(visits_table): + """Test AddColumn.""" + ops = Sequential( + Literal(2, "test_col1"), + Literal(3, "test_col2"), + AddColumn("test_col1", "test_col2", new_col_labels="test_col3"), + ) + visits = visits_table.ops(ops).run() + assert "test_col3" in visits.columns + assert (visits["test_col3"] == 5).all() + + ops = Sequential( + Literal(2, "test_col1"), + Literal(3, "test_col2"), + AddColumn( + "test_col1", + "test_col2", + negative=True, + new_col_labels="test_col3", + ), + ) + visits = visits_table.ops(ops).run() + assert "test_col3" in visits.columns + assert (visits["test_col3"] == -1).all() + + +@pytest.mark.integration_test() +def test_rename(visits_table): + """Test Rename.""" + rename_op = Rename({"care_site_name": "hospital_name"}) + visits = visits_table.ops(rename_op).run() + assert "hospital_name" in visits.columns + assert "care_site_name" not in visits.columns + + +@pytest.mark.integration_test() +def test_literal(visits_table): + """Test Literal.""" + literal_ops = Sequential(Literal(1, "new_col"), Literal("a", "new_col2")) + visits = visits_table.ops(literal_ops).run() + assert "new_col" in visits.columns + assert visits["new_col"].iloc[0] == 1 + assert "new_col2" in visits.columns + assert visits["new_col2"].iloc[0] == "a" + + +@pytest.mark.integration_test() +def test_reorder_after(visits_table): + """Test ReorderAfter.""" + 
reorder_op = ReorderAfter("visit_concept_name", "care_site_id") + visits = visits_table.ops(reorder_op).run() + assert list(visits.columns).index("care_site_id") + 1 == list(visits.columns).index( + "visit_concept_name", + ) + + +@pytest.mark.integration_test() +def test_limit(visits_table): + """Test Limit.""" + visits = visits_table.ops(Limit(10)).run() + assert len(visits) == 10 + + +@pytest.mark.integration_test() +def test_order_by(visits_table): + """Test OrderBy.""" + orderby_op = OrderBy("visit_concept_name") + visits = visits_table.ops(orderby_op).run() + assert visits["visit_concept_name"].is_monotonic_increasing + + +@pytest.mark.integration_test() +def test_substring(visits_table): + """Test Substring.""" + substring_op = Substring("visit_concept_name", 0, 3, "visit_concept_name_substr") + visits = visits_table.ops(substring_op).run() + assert visits["visit_concept_name_substr"].value_counts()["Ou"] == 4057 + + +@pytest.mark.integration_test() +def test_trim(visits_table): + """Test Trim.""" + trim_op = Trim("visit_concept_name", "visit_concept_name_trim") + visits = visits_table.ops(trim_op).run() + assert visits["visit_concept_name_trim"].value_counts()["Inpatient Visit"] == 108 + + +@pytest.mark.integration_test() +def test_extract_timestamp_component( + visits_table, +): + """Test ExtractTimestampComponent.""" + extract_ts_op = ExtractTimestampComponent( + "visit_start_date", + "year", + "visit_start_date_year", + ) + visits = visits_table.ops(extract_ts_op).run() + assert visits["visit_start_date_year"].value_counts()[2021] == 371 + + +@pytest.mark.integration_test() +def test_add_numeric(visits_table): + """Test AddNumeric.""" + ops = Sequential(Literal(1, "new_col"), AddNumeric("new_col", 1, "new_col_plus_1")) + visits = visits_table.ops(ops).run() + assert visits["new_col_plus_1"].iloc[0] == 2 + + +@pytest.mark.integration_test() +def test_apply(visits_table): + """Test Apply.""" + apply_op = Apply( + "visit_concept_name", + lambda x: x + "!", + "visit_concept_name_exclaim", + ) + visits = visits_table.ops(apply_op).run() + assert ( + visits["visit_concept_name_exclaim"].value_counts()["Outpatient Visit!"] == 4057 + ) + apply_op = Apply( + ["visit_occurrence_id", "person_id"], + lambda x, y: x + y, + "sum_id", + ) + visits = visits_table.ops(apply_op).run() + assert ( + visits["sum_id"].iloc[0] + == visits["visit_occurrence_id"].iloc[0] + visits["person_id"].iloc[0] + ) + assert visits["sum_id"].isna().sum() == visits["person_id"].isna().sum() + apply_op = Apply( + ["visit_occurrence_id", "person_id"], + [lambda x: x + 1, lambda x: x + 2], + ["sum_id", "sum_id2"], + ) + visits = visits_table.ops(apply_op).run() + assert visits["sum_id"].iloc[0] == visits["visit_occurrence_id"].iloc[0] + 1 + assert visits["sum_id2"].iloc[0] == visits["person_id"].iloc[0] + 2 + + +@pytest.mark.integration_test() +def test_condition_regex_match( + measurements_table, +): + """Test ConditionRegexMatch.""" + measurements_op = ConditionRegexMatch( + "value_source_value", + r"^[0-9]+(\.[0-9]+)?$", + binarize_col="value_source_value_match", + ) + measurements = measurements_table.ops(measurements_op).run() + assert "value_source_value_match" in measurements.columns + assert ( + measurements["value_source_value_match"].sum() + == measurements["value_source_value"].str.match(r"^[0-9]+(\.[0-9]+)?$").sum() + ) + + +@pytest.mark.integration_test() +def test_group_by_aggregate( + visits_table, + measurements_table, +): + """Test GroupByAggregate.""" + with pytest.raises(ValueError): + 
visits_table.ops( + GroupByAggregate("person_id", {"person_id": ("donkey", "visit_count")}), + ) + with pytest.raises(ValueError): + visits_table.ops( + GroupByAggregate("person_id", {"person_id": ("count", "person_id")}), + ) + + visits_count = visits_table.ops( + GroupByAggregate( + "person_id", + {"person_id": ("count", "num_visits")}, + ), + ).run() + visits_string_agg = visits_table.ops( + GroupByAggregate( + "person_id", + {"visit_concept_name": ("string_agg", "visit_concept_names")}, + {"visit_concept_name": ", "}, + ), + ).run() + measurements_sum = measurements_table.ops( + GroupByAggregate( + "person_id", + {"value_as_number": ("sum", "value_as_number_sum")}, + ), + ).run() + measurements_average = measurements_table.ops( + GroupByAggregate( + "person_id", + {"value_as_number": ("average", "value_as_number_average")}, + ), + ).run() + measurements_min = measurements_table.ops( + GroupByAggregate( + "person_id", + {"value_as_number": ("min", "value_as_number_min")}, + ), + ).run() + measurements_max = measurements_table.ops( + GroupByAggregate( + "person_id", + {"value_as_number": ("max", "value_as_number_max")}, + ), + ).run() + measurements_median = measurements_table.ops( + GroupByAggregate( + "person_id", + {"value_as_number": ("median", "value_as_number_median")}, + ), + ).run() + + assert "num_visits" in visits_count.columns + assert visits_count[visits_count["person_id"] == 33]["num_visits"][0] == 86 + assert "visit_concept_names" in visits_string_agg.columns + test_visit_concept_names = visits_string_agg[visits_string_agg["person_id"] == 33][ + "visit_concept_names" + ][0].split(",") + test_visit_concept_names = [item.strip() for item in test_visit_concept_names] + assert len(test_visit_concept_names) == 86 + assert "Outpatient Visit" in test_visit_concept_names + assert "value_as_number_sum" in measurements_sum.columns + assert ( + measurements_sum[measurements_sum["person_id"] == 33]["value_as_number_sum"][0] + == 9881.3 + ) + assert "value_as_number_average" in measurements_average.columns + assert isclose( + measurements_average[measurements_average["person_id"] == 33][ + "value_as_number_average" + ][0], + 75.42, + abs_tol=0.01, + ) + assert "value_as_number_min" in measurements_min.columns + assert ( + measurements_min[measurements_min["person_id"] == 33]["value_as_number_min"][0] + == 0.0 + ) + assert "value_as_number_max" in measurements_max.columns + assert ( + measurements_max[measurements_max["person_id"] == 33]["value_as_number_max"][0] + == 360.7 + ) + assert "value_as_number_median" in measurements_median.columns + assert ( + measurements_median[measurements_median["person_id"] == 33][ + "value_as_number_median" + ].item() + == 75.7 + ) + + +@pytest.mark.integration_test() +def test_drop_nulls(visits_table): + """Test DropNulls.""" + visits = visits_table.ops(DropNulls("preceding_visit_occurrence_id")).run() + assert visits["preceding_visit_occurrence_id"].isnull().sum() == 0 + + +@pytest.mark.integration_test() +def test_drop_empty(visits_table): + """Test DropEmpty.""" + visits = visits_table.ops(DropEmpty("visit_concept_name")).run() + assert (visits["visit_concept_name"] == "").sum() == 0 + + +@pytest.mark.integration_test() +def test_condition_before_date(visits_table): + """Test ConditionBeforeDate.""" + visits = visits_table.ops( + ConditionBeforeDate("visit_start_date", "2018-01-01"), + ).run() + assert pd.Timestamp(visits["visit_start_date"].max()) < pd.Timestamp("2018-01-01") + + +@pytest.mark.integration_test() +def 
test_condition_after_date(visits_table): + """Test ConditionAfterDate.""" + visits = visits_table.ops( + ConditionAfterDate("visit_start_date", "2018-01-01"), + ).run() + assert pd.Timestamp(visits["visit_start_date"].min()) > pd.Timestamp("2018-01-01") + + +@pytest.mark.integration_test() +def test_condition_in(visits_table): + """Test ConditionIn.""" + visits = visits_table.ops( + ConditionIn("visit_concept_name", ["Outpatient Visit"]), + ).run() + assert all(visits["visit_concept_name"] == "Outpatient Visit") + + +@pytest.mark.integration_test() +def test_condition_in_months(visits_table): + """Test ConditionInMonths.""" + ops = Sequential( + Cast("visit_start_date", "timestamp"), + ConditionInMonths("visit_start_date", 6), + ) + visits = visits_table.ops(ops).run() + assert (visits["visit_start_date"].dt.month == 6).all() + + +@pytest.mark.integration_test() +def test_condition_in_years(visits_table): + """Test ConditionInYears.""" + ops = Sequential( + Cast("visit_start_date", "timestamp"), + ConditionInYears("visit_start_date", 2018), + ) + visits = visits_table.ops(ops).run() + assert (visits["visit_start_date"].dt.year == 2018).all() + + +@pytest.mark.integration_test() +def test_condition_substring(visits_table): + """Test ConditionSubstring.""" + visits = visits_table.ops( + ConditionSubstring("visit_concept_name", "Outpatient"), + ).run() + assert all(visits["visit_concept_name"].str.contains("Outpatient")) + + +@pytest.mark.integration_test() +def test_condition_starts_with(visits_table): + """Test ConditionStartsWith.""" + visits = visits_table.ops( + ConditionStartsWith("visit_concept_name", "Outpatient"), + ).run() + assert all(visits["visit_concept_name"].str.startswith("Outpatient")) + + +@pytest.mark.integration_test() +def test_condition_ends_with(visits_table): + """Test ConditionEndsWith.""" + visits = visits_table.ops(ConditionEndsWith("visit_concept_name", "Visit")).run() + assert all(visits["visit_concept_name"].str.endswith("Visit")) + + +@pytest.mark.integration_test() +def test_condition_equals(visits_table): + """Test ConditionEquals.""" + visits = visits_table.ops( + ConditionEquals("visit_concept_name", "Outpatient Visit"), + ).run() + assert all(visits["visit_concept_name"] == "Outpatient Visit") + visits = visits_table.ops( + ConditionEquals("visit_concept_name", "Outpatient Visit", not_=True), + ).run() + assert all(visits["visit_concept_name"] != "Outpatient Visit") + + +@pytest.mark.integration_test() +def test_condition_greater_than(visits_table): + """Test ConditionGreaterThan.""" + visits = visits_table.ops(ConditionGreaterThan("visit_concept_id", 9300)).run() + assert all(visits["visit_concept_id"] > 9300) + + +@pytest.mark.integration_test() +def test_condition_less_than(visits_table): + """Test ConditionLessThan.""" + visits = visits_table.ops(ConditionLessThan("visit_concept_id", 9300)).run() + assert all(visits["visit_concept_id"] < 9300) + + +@pytest.mark.integration_test() +def test_union(visits_table): + """Test Union.""" + outpatient_filtered = visits_table.ops( + ConditionEquals("visit_concept_name", "Outpatient Visit"), + ) + emergency_filtered = visits_table.ops( + ConditionEquals("visit_concept_name", "Emergency Room Visit"), + ) + visits = emergency_filtered.union(outpatient_filtered).run() + assert len(visits) == 4212 + assert all( + visits["visit_concept_name"].isin(["Outpatient Visit", "Emergency Room Visit"]), + ) + visits = emergency_filtered.union_all(emergency_filtered).run() + assert len(visits) == 310 + + 
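+# Sequential below takes a list of ops; it also accepts ops as positional arguments (as in test_add_column above).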
+@pytest.mark.integration_test() +def test_sequential(visits_table): + """Test Sequential.""" + substr_op = Substring("visit_concept_name", 0, 4, "visit_concept_name_substr") + operations = [ + Literal(33, "const"), + Rename({"care_site_name": "hospital_name"}), + Apply("visit_concept_name", lambda x: x + "!", "visit_concept_name_exclaim"), + OrderBy(["person_id", "visit_start_date"]), + substr_op, + ] + sequential_ops = Sequential(operations) + visits = visits_table.ops(sequential_ops).run() + assert "hospital_name" in visits.columns + assert "visit_concept_name_exclaim" in visits.columns + assert list(visits[visits["person_id"] == 33]["visit_concept_name_exclaim"])[0] == ( + "Outpatient Visit!" + ) + assert "visit_concept_name_substr" in visits.columns + assert list(visits[visits["person_id"] == 33]["visit_concept_name_substr"])[0] == ( + "Out" + ) + + +@pytest.mark.integration_test() +def test_or(visits_table): + """Test Or.""" + or_op = Or( + ConditionEquals("visit_concept_name", "Outpatient Visit"), + ConditionLike("visit_concept_name", "%Emergency%"), + ) + visits = visits_table.ops(or_op).run() + assert len(visits) == 4212 + assert all( + visits["visit_concept_name"].isin(["Outpatient Visit", "Emergency Room Visit"]), + ) + + +@pytest.mark.integration_test() +def test_and(visits_table): + """Test And.""" + and_op = And( + [ + ConditionEquals("visit_concept_name", "Outpatient Visit"), + ConditionLike("visit_concept_name", "%Emergency%", not_=True), + ], + ) + visits = visits_table.ops(and_op).run() + assert len(visits) == 4057 + and_op = And( + ConditionEquals("visit_concept_name", "Outpatient Visit"), + ConditionLike("visit_concept_name", "%Emergency%", not_=True), + ) + visits = visits_table.ops(and_op).run() + assert len(visits) == 4057 + + +@pytest.mark.integration_test() +def test_distinct(visits_table): + """Test Distinct.""" + distinct_op = Distinct(["person_id"]) + visits = visits_table.ops(distinct_op).run() + assert len(visits) == 109 + + +@pytest.mark.integration_test() +def test_condition_like(visits_table): + """Test ConditionLike.""" + like_op = ConditionLike("visit_concept_name", "Outpatient%") + visits = visits_table.ops(like_op).run() + assert len(visits) == 4057 + assert all(visits["visit_concept_name"].str.startswith("Outpatient")) diff --git a/tests/cycquery/test_orm.py b/tests/cycquery/test_orm.py new file mode 100644 index 0000000..794081c --- /dev/null +++ b/tests/cycquery/test_orm.py @@ -0,0 +1,31 @@ +"""Test cycquery.orm module.""" + +import os + +import pandas as pd +import pytest + +from cycquery import OMOPQuerier + + +@pytest.mark.integration_test() +def test_omop_querier(): + """Test ORM using OMOPQuerier.""" + querier = OMOPQuerier( + database="synthea_integration_test", + schema_name="cdm_synthea10", + user="postgres", + password="pwd", + ) + assert querier is not None + db_ = querier.db + visits_query = querier.visit_occurrence().query + db_.save_query_to_csv(visits_query, "visits.csv") + visits_df = pd.read_csv("visits.csv") + assert len(visits_df) == 4320 + os.remove("visits.csv") + + db_.save_query_to_parquet(visits_query, "visits.parquet") + visits_df = pd.read_parquet("visits.parquet") + assert len(visits_df) == 4320 + os.remove("visits.parquet") diff --git a/tests/cycquery/test_util.py b/tests/cycquery/test_util.py new file mode 100644 index 0000000..315e27e --- /dev/null +++ b/tests/cycquery/test_util.py @@ -0,0 +1,208 @@ +"""Test query API util functions.""" + +import pytest +from sqlalchemy import Table, column, select +from
sqlalchemy.sql.selectable import Select, Subquery +from sqlalchemy.types import Integer + +from cycquery.util import ( + DBTable, + _check_column_type, + _to_select, + _to_subquery, + drop_columns, + ends_with, + equals, + filter_columns, + get_column, + get_column_names, + get_columns, + greater_than, + has_columns, + less_than, + not_equals, + process_column, + process_elem, + process_list, + rename_columns, + reorder_columns, + starts_with, + table_params_to_type, + trim_columns, +) + + +@pytest.fixture() +def test_table(): + """Test table input.""" + return select(process_column(column("a"), to_int=True), column("b"), column("c")) + + +def test__check_column_type(test_table): + """Test _check_column_type fn.""" + assert _check_column_type(test_table, ["a"], Integer) + with pytest.raises(ValueError): + assert _check_column_type(test_table, ["b"], Integer, raise_error=True) + + +def test_ends_with(): + """Test ends_with.""" + test_col = column("a") + assert ( + str(ends_with(test_col, "a")) == "trim(lower(CAST(a AS VARCHAR))) LIKE :trim_1" + ) + + +def test_starts_with(): + """Test starts_with fn.""" + test_col = column("a") + assert ( + str(starts_with(test_col, "a")) + == "trim(lower(CAST(a AS VARCHAR))) LIKE :trim_1" + ) + + +def test__to_subquery(): + """Test _to_subquery fn.""" + assert isinstance(_to_subquery(select().subquery()), Subquery) + assert isinstance(_to_subquery(select()), Subquery) + assert isinstance(_to_subquery(Table()), Subquery) + assert isinstance(_to_subquery(DBTable("a", Table())), Subquery) + with pytest.raises(TypeError): + _to_subquery("a") + with pytest.raises(ValueError): + table_params_to_type(int) + + +def test__to_select(): + """Test _to_select fn.""" + assert isinstance(_to_select(select().subquery()), Select) + assert isinstance(_to_select(select()), Select) + assert isinstance(_to_select(Table()), Select) + assert isinstance(_to_select(DBTable("a", Table())), Select) + with pytest.raises(TypeError): + _to_select("a") + + +def test_get_column(test_table): + """Test get_column fn.""" + assert str(get_column(test_table, "a")) == "anon_1.a" + with pytest.raises(ValueError): + get_column(select(column("a")), "b") + + +def test_get_columns(test_table): + """Test get_columns fn.""" + cols = get_columns(test_table, "c") + cols = [str(col) for col in cols] + assert cols == ["anon_1.c"] + with pytest.raises(ValueError): + get_column(select(column("a")), "b") + + +def test_get_column_names(test_table): + """Test get_column_names fn.""" + assert get_column_names(test_table) == ["a", "b", "c"] + + +def test_filter_columns(test_table): + """Test filter_columns fn.""" + filtered = filter_columns(test_table, ["a", "c", "d"]) + assert get_column_names(filtered) == ["a", "c"] + + +def test_has_columns(test_table): + """Test has_columns fn.""" + assert not has_columns(test_table, ["a", "d"]) + assert has_columns(test_table, ["a", "b"]) + with pytest.raises(ValueError): + has_columns(test_table, ["a", "d"], raise_error=True) + + +def test_drop_columns(test_table): + """Test drop_columns fn.""" + after_drop = drop_columns(test_table, ["a"]) + assert get_column_names(after_drop) == ["b", "c"] + + +def test_rename_columns(test_table): + """Test rename_columns fn.""" + after_rename = rename_columns(test_table, {"a": "apple", "b": "ball"}) + assert get_column_names(after_rename) == ["apple", "ball", "c"] + + +def test_reorder_columns(test_table): + """Test reorder_columns fn.""" + with pytest.raises(ValueError): + reorder_columns(test_table, ["ball", "c", "a"]) + with 
pytest.raises(ValueError): + reorder_columns(test_table, ["c", "a"]) + after_reorder = reorder_columns(test_table, ["b", "c", "a"]) + assert get_column_names(after_reorder) == ["b", "c", "a"] + + +def test_trim_columns(test_table): + """Test trim_columns fn.""" + after_trim = trim_columns(test_table, ["a"], ["apple"]) + assert get_column_names(after_trim) == ["a", "b", "c", "apple"] + + +def test_process_elem(): + """Test process_elem fn.""" + assert process_elem("Test", lower=True) == "test" + assert process_elem("Test ", lower=True, trim=True) == "test" + assert process_elem("1", to_int=True) == 1 + assert process_elem("1.2", to_float=True) == 1.2 + assert process_elem(1, to_bool=True) is True + assert process_elem(0, to_bool=True) is False + + +def test_process_list(): + """Test process_list fn.""" + assert process_list([1, 2, 3, 0], to_bool=True) == [True, True, True, False] + + +def test_process_column(): + """Test process_column fn.""" + test_col = column("a") + processed_col = process_column(test_col, to_int=True) + assert str(processed_col) == "CAST(a AS INTEGER)" + processed_col = process_column(test_col, to_float=True) + assert str(processed_col) == "CAST(a AS FLOAT)" + processed_col = process_column(test_col, to_str=True) + assert str(processed_col) == "CAST(a AS VARCHAR)" + processed_col = process_column(test_col, to_bool=True) + assert str(processed_col) == "CAST(a AS BOOLEAN)" + processed_col = process_column(test_col, to_date=True) + assert str(processed_col) == "CAST(a AS DATE)" + processed_col = process_column(test_col, to_timestamp=True) + assert str(processed_col) == "CAST(a AS DATETIME)" + test_col.type = "VARCHAR" + processed_col = process_column(test_col, lower=True, trim=True) + assert str(processed_col) == "trim(lower(a))" + + +def test_equals(): + """Test equals fn.""" + test_col = column("a") + assert str(equals(test_col, "bat")) == "a = :a_1" + + +def test_greater_than(): + """Test greater_than fn.""" + test_col = column("a") + assert str(greater_than(test_col, 1)) == "a > :a_1" + assert str(greater_than(test_col, 1, equal=True)) == "a >= :a_1" + + +def test_less_than(): + """Test less_than fn.""" + test_col = column("a") + assert str(less_than(test_col, 1)) == "a < :a_1" + assert str(less_than(test_col, 1, equal=True)) == "a <= :a_1" + + +def test_not_equals(): + """Test not_equals fn.""" + test_col = column("a") + assert str(not_equals(test_col, "bat")) == "a != :a_1"