diff --git a/.github/workflows/constraints.txt b/.github/workflows/constraints.txt index fc43605..edd1795 100644 --- a/.github/workflows/constraints.txt +++ b/.github/workflows/constraints.txt @@ -1,4 +1,4 @@ pip==24.0 -nox==2024.3.2 +nox==2024.4.15 nox-poetry==1.0.3 poetry==1.8.2 diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index c858081..fb1d701 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -219,4 +219,4 @@ jobs: nox --session=coverage -- xml - name: Upload coverage report - uses: codecov/codecov-action@v4.1.1 + uses: codecov/codecov-action@v4.3.1 diff --git a/docs/requirements.txt b/docs/requirements.txt index c14d54a..0678023 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,7 +1,7 @@ furo==2024.1.29 -sphinx==7.2.6 +sphinx==7.3.7 sphinx-click==5.1.0 -myst_parser==2.0.0 +myst_parser==3.0.1 sphinxcontrib-mermaid==0.9.2 -pytest==8.1.1 +pytest==8.2.0 pytest-asyncio==0.23.6 diff --git a/poetry.lock b/poetry.lock index 30369e4..d7e8041 100644 --- a/poetry.lock +++ b/poetry.lock @@ -234,33 +234,33 @@ lxml = ["lxml"] [[package]] name = "black" -version = "24.4.0" +version = "24.4.2" description = "The uncompromising code formatter." optional = false python-versions = ">=3.8" files = [ - {file = "black-24.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6ad001a9ddd9b8dfd1b434d566be39b1cd502802c8d38bbb1ba612afda2ef436"}, - {file = "black-24.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e3a3a092b8b756c643fe45f4624dbd5a389f770a4ac294cf4d0fce6af86addaf"}, - {file = "black-24.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dae79397f367ac8d7adb6c779813328f6d690943f64b32983e896bcccd18cbad"}, - {file = "black-24.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:71d998b73c957444fb7c52096c3843875f4b6b47a54972598741fe9a7f737fcb"}, - {file = "black-24.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8e5537f456a22cf5cfcb2707803431d2feeb82ab3748ade280d6ccd0b40ed2e8"}, - {file = "black-24.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:64e60a7edd71fd542a10a9643bf369bfd2644de95ec71e86790b063aa02ff745"}, - {file = "black-24.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5cd5b4f76056cecce3e69b0d4c228326d2595f506797f40b9233424e2524c070"}, - {file = "black-24.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:64578cf99b6b46a6301bc28bdb89f9d6f9b592b1c5837818a177c98525dbe397"}, - {file = "black-24.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f95cece33329dc4aa3b0e1a771c41075812e46cf3d6e3f1dfe3d91ff09826ed2"}, - {file = "black-24.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4396ca365a4310beef84d446ca5016f671b10f07abdba3e4e4304218d2c71d33"}, - {file = "black-24.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44d99dfdf37a2a00a6f7a8dcbd19edf361d056ee51093b2445de7ca09adac965"}, - {file = "black-24.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:21f9407063ec71c5580b8ad975653c66508d6a9f57bd008bb8691d273705adcd"}, - {file = "black-24.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:652e55bb722ca026299eb74e53880ee2315b181dfdd44dca98e43448620ddec1"}, - {file = "black-24.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7f2966b9b2b3b7104fca9d75b2ee856fe3fdd7ed9e47c753a4bb1a675f2caab8"}, - {file = "black-24.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bb9ca06e556a09f7f7177bc7cb604e5ed2d2df1e9119e4f7d2f1f7071c32e5d"}, - {file = "black-24.4.0-cp38-cp38-win_amd64.whl", hash = 
"sha256:d4e71cdebdc8efeb6deaf5f2deb28325f8614d48426bed118ecc2dcaefb9ebf3"}, - {file = "black-24.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6644f97a7ef6f401a150cca551a1ff97e03c25d8519ee0bbc9b0058772882665"}, - {file = "black-24.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:75a2d0b4f5eb81f7eebc31f788f9830a6ce10a68c91fbe0fade34fff7a2836e6"}, - {file = "black-24.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb949f56a63c5e134dfdca12091e98ffb5fd446293ebae123d10fc1abad00b9e"}, - {file = "black-24.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:7852b05d02b5b9a8c893ab95863ef8986e4dda29af80bbbda94d7aee1abf8702"}, - {file = "black-24.4.0-py3-none-any.whl", hash = "sha256:74eb9b5420e26b42c00a3ff470dc0cd144b80a766128b1771d07643165e08d0e"}, - {file = "black-24.4.0.tar.gz", hash = "sha256:f07b69fda20578367eaebbd670ff8fc653ab181e1ff95d84497f9fa20e7d0641"}, + {file = "black-24.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dd1b5a14e417189db4c7b64a6540f31730713d173f0b63e55fabd52d61d8fdce"}, + {file = "black-24.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e537d281831ad0e71007dcdcbe50a71470b978c453fa41ce77186bbe0ed6021"}, + {file = "black-24.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaea3008c281f1038edb473c1aa8ed8143a5535ff18f978a318f10302b254063"}, + {file = "black-24.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:7768a0dbf16a39aa5e9a3ded568bb545c8c2727396d063bbaf847df05b08cd96"}, + {file = "black-24.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:257d724c2c9b1660f353b36c802ccece186a30accc7742c176d29c146df6e474"}, + {file = "black-24.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bdde6f877a18f24844e381d45e9947a49e97933573ac9d4345399be37621e26c"}, + {file = "black-24.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e151054aa00bad1f4e1f04919542885f89f5f7d086b8a59e5000e6c616896ffb"}, + {file = "black-24.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:7e122b1c4fb252fd85df3ca93578732b4749d9be076593076ef4d07a0233c3e1"}, + {file = "black-24.4.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:accf49e151c8ed2c0cdc528691838afd217c50412534e876a19270fea1e28e2d"}, + {file = "black-24.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:88c57dc656038f1ab9f92b3eb5335ee9b021412feaa46330d5eba4e51fe49b04"}, + {file = "black-24.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be8bef99eb46d5021bf053114442914baeb3649a89dc5f3a555c88737e5e98fc"}, + {file = "black-24.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:415e686e87dbbe6f4cd5ef0fbf764af7b89f9057b97c908742b6008cc554b9c0"}, + {file = "black-24.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bf10f7310db693bb62692609b397e8d67257c55f949abde4c67f9cc574492cc7"}, + {file = "black-24.4.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:98e123f1d5cfd42f886624d84464f7756f60ff6eab89ae845210631714f6db94"}, + {file = "black-24.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48a85f2cb5e6799a9ef05347b476cce6c182d6c71ee36925a6c194d074336ef8"}, + {file = "black-24.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:b1530ae42e9d6d5b670a34db49a94115a64596bc77710b1d05e9801e62ca0a7c"}, + {file = "black-24.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:37aae07b029fa0174d39daf02748b379399b909652a806e5708199bd93899da1"}, + {file = "black-24.4.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:da33a1a5e49c4122ccdfd56cd021ff1ebc4a1ec4e2d01594fef9b6f267a9e741"}, + {file = 
"black-24.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef703f83fc32e131e9bcc0a5094cfe85599e7109f896fe8bc96cc402f3eb4b6e"}, + {file = "black-24.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:b9176b9832e84308818a99a561e90aa479e73c523b3f77afd07913380ae2eab7"}, + {file = "black-24.4.2-py3-none-any.whl", hash = "sha256:d36ed1124bb81b32f8614555b34cc4259c3fbc7eec17870e8ff8ded335b58d8c"}, + {file = "black-24.4.2.tar.gz", hash = "sha256:c872b53057f000085da66a19c55d68f6f8ddcac2642392ad3a355878406fbd4d"}, ] [package.dependencies] @@ -618,13 +618,13 @@ files = [ [[package]] name = "docutils" -version = "0.21.1" +version = "0.21.2" description = "Docutils -- Python Documentation Utilities" optional = false python-versions = ">=3.9" files = [ - {file = "docutils-0.21.1-py3-none-any.whl", hash = "sha256:14c8d34a55b46c88f9f714adb29cefbdd69fb82f3fef825e59c5faab935390d8"}, - {file = "docutils-0.21.1.tar.gz", hash = "sha256:65249d8a5345bc95e0f40f280ba63c98eb24de35c6c8f5b662e3e8948adea83f"}, + {file = "docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2"}, + {file = "docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f"}, ] [[package]] @@ -641,15 +641,50 @@ files = [ [package.extras] test = ["pytest (>=6)"] +[[package]] +name = "fastapi" +version = "0.110.3" +description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fastapi-0.110.3-py3-none-any.whl", hash = "sha256:fd7600612f755e4050beb74001310b5a7e1796d149c2ee363124abdfa0289d32"}, + {file = "fastapi-0.110.3.tar.gz", hash = "sha256:555700b0159379e94fdbfc6bb66a0f1c43f4cf7060f25239af3d84b63a656626"}, +] + +[package.dependencies] +pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0" +starlette = ">=0.37.2,<0.38.0" +typing-extensions = ">=4.8.0" + +[package.extras] +all = ["email_validator (>=2.0.0)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.7)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] + +[[package]] +name = "fastapi-utils" +version = "0.2.1" +description = "Reusable utilities for FastAPI" +optional = false +python-versions = ">=3.6,<4.0" +files = [ + {file = "fastapi-utils-0.2.1.tar.gz", hash = "sha256:0e6c7fc1870b80e681494957abf65d4f4f42f4c7f70005918e9181b22f1bd759"}, + {file = "fastapi_utils-0.2.1-py3-none-any.whl", hash = "sha256:dd0be7dc7f03fa681b25487a206651d99f2330d5a567fb8ab6cb5f8a06a29360"}, +] + +[package.dependencies] +fastapi = "*" +pydantic = ">=1.0,<2.0" +sqlalchemy = ">=1.3.12,<2.0.0" + [[package]] name = "filelock" -version = "3.13.4" +version = "3.14.0" description = "A platform independent file lock." 
optional = false python-versions = ">=3.8" files = [ - {file = "filelock-3.13.4-py3-none-any.whl", hash = "sha256:404e5e9253aa60ad457cae1be07c0f0ca90a63931200a47d9b6a6af84fd7b45f"}, - {file = "filelock-3.13.4.tar.gz", hash = "sha256:d13f466618bfde72bd2c18255e269f72542c6e70e7bac83a0232d6b1cc5c8cf4"}, + {file = "filelock-3.14.0-py3-none-any.whl", hash = "sha256:43339835842f110ca7ae60f1e1c160714c5a6afd15a2873419ab185334975c0f"}, + {file = "filelock-3.14.0.tar.gz", hash = "sha256:6ea72da3be9b8c82afd3edcf99f2fffbb5076335a5ae4d03248bb5b6c3eae78a"}, ] [package.extras] @@ -675,13 +710,13 @@ pyflakes = ">=3.2.0,<3.3.0" [[package]] name = "flake8-bugbear" -version = "24.4.21" +version = "24.4.26" description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle." optional = false python-versions = ">=3.8.1" files = [ - {file = "flake8_bugbear-24.4.21-py3-none-any.whl", hash = "sha256:58581060a1650f4b11344795db8a4934867d4450486319ece86d7720a9414036"}, - {file = "flake8_bugbear-24.4.21.tar.gz", hash = "sha256:d1a87b8f6ca1ed28772c36515f751ea3709e041d78bca60590a570b9cb802e55"}, + {file = "flake8_bugbear-24.4.26-py3-none-any.whl", hash = "sha256:cb430dd86bc821d79ccc0b030789a9c87a47a369667f12ba06e80f11305e8258"}, + {file = "flake8_bugbear-24.4.26.tar.gz", hash = "sha256:ff8d4ba5719019ebf98e754624c30c05cef0dadcf18a65d91c7567300e52a130"}, ] [package.dependencies] @@ -813,13 +848,13 @@ files = [ [[package]] name = "furo" -version = "2024.1.29" +version = "2024.4.27" description = "A clean customisable Sphinx documentation theme." optional = false python-versions = ">=3.8" files = [ - {file = "furo-2024.1.29-py3-none-any.whl", hash = "sha256:3548be2cef45a32f8cdc0272d415fcb3e5fa6a0eb4ddfe21df3ecf1fe45a13cf"}, - {file = "furo-2024.1.29.tar.gz", hash = "sha256:4d6b2fe3f10a6e36eb9cc24c1e7beb38d7a23fc7b3c382867503b7fcac8a1e02"}, + {file = "furo-2024.4.27-py3-none-any.whl", hash = "sha256:f7eb1b2c2204fd9cbd4af42e027289a67f17a98a4e14f4f9e2f17b96d61bb020"}, + {file = "furo-2024.4.27.tar.gz", hash = "sha256:15a9b65269038def2cefafb86c71c6616e3969b8f07ba231f588c10c4aee6d88"}, ] [package.dependencies] @@ -828,6 +863,77 @@ pygments = ">=2.7" sphinx = ">=6.0,<8.0" sphinx-basic-ng = "*" +[[package]] +name = "greenlet" +version = "3.0.3" +description = "Lightweight in-process concurrent programming" +optional = false +python-versions = ">=3.7" +files = [ + {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, + {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, + {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, + {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, + {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, + {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, + {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, + {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, + {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"}, 
+ {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"}, + {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"}, + {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"}, + {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"}, + {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"}, + {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"}, + {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"}, + {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"}, + {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"}, + {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"}, + {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, + {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, + {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, + {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, + {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, + {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, +] + +[package.extras] +docs = ["Sphinx", "furo"] +test = ["objgraph", "psutil"] + [[package]] name = "gridworks-cert" version = "0.4.3" @@ -848,18 +954,21 @@ xdg = ">=6.0.0" [[package]] name = "gridworks-protocol" -version = "0.7.4" +version = "0.7.5" description = "Gridworks Protocol" optional = false -python-versions = "*" -files = [] -develop = false +python-versions = "<4.0,>=3.10" +files = [ + {file = "gridworks_protocol-0.7.5-py3-none-any.whl", hash = "sha256:56061dba09dbcdd29a1267a07407de4d0e069dd03a3b7e2ba5d7a0792e595e7a"}, + {file = "gridworks_protocol-0.7.5.tar.gz", hash = "sha256:a29a763694a86d49de42da5712e5cd8340e35a048f38ec8db855ab6981426506"}, +] -[package.source] -type = "git" -url = "https://github.com/thegridelectric/gridworks-protocol.git" -reference = "as/listen" -resolved_reference = "734fdf3121f818ecbf6458eb9ec4d30d39a17bd8" +[package.dependencies] +fastapi-utils = ">=0.2.1,<0.3.0" +pendulum = "2.1.2" +pydantic = ">=1.10.11,<2.0.0" +pytz = ">=2024.1,<2025.0" +yarl = ">=1.9.2,<2.0.0" [[package]] name = "h11" @@ -1207,38 +1316,38 @@ files = [ [[package]] name = "mypy" -version = "1.9.0" +version = "1.10.0" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ - {file = "mypy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f8a67616990062232ee4c3952f41c779afac41405806042a8126fe96e098419f"}, - {file = "mypy-1.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d357423fa57a489e8c47b7c85dfb96698caba13d66e086b412298a1a0ea3b0ed"}, - {file = "mypy-1.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49c87c15aed320de9b438ae7b00c1ac91cd393c1b854c2ce538e2a72d55df150"}, - {file = "mypy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:48533cdd345c3c2e5ef48ba3b0d3880b257b423e7995dada04248725c6f77374"}, - {file = "mypy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:4d3dbd346cfec7cb98e6cbb6e0f3c23618af826316188d587d1c1bc34f0ede03"}, - {file = "mypy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:653265f9a2784db65bfca694d1edd23093ce49740b2244cde583aeb134c008f3"}, - {file = "mypy-1.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3a3c007ff3ee90f69cf0a15cbcdf0995749569b86b6d2f327af01fd1b8aee9dc"}, - {file = "mypy-1.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2418488264eb41f69cc64a69a745fad4a8f86649af4b1041a4c64ee61fc61129"}, - {file = "mypy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:68edad3dc7d70f2f17ae4c6c1b9471a56138ca22722487eebacfd1eb5321d612"}, - {file = "mypy-1.9.0-cp311-cp311-win_amd64.whl", hash = 
"sha256:85ca5fcc24f0b4aeedc1d02f93707bccc04733f21d41c88334c5482219b1ccb3"}, - {file = "mypy-1.9.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aceb1db093b04db5cd390821464504111b8ec3e351eb85afd1433490163d60cd"}, - {file = "mypy-1.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0235391f1c6f6ce487b23b9dbd1327b4ec33bb93934aa986efe8a9563d9349e6"}, - {file = "mypy-1.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4d5ddc13421ba3e2e082a6c2d74c2ddb3979c39b582dacd53dd5d9431237185"}, - {file = "mypy-1.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:190da1ee69b427d7efa8aa0d5e5ccd67a4fb04038c380237a0d96829cb157913"}, - {file = "mypy-1.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:fe28657de3bfec596bbeef01cb219833ad9d38dd5393fc649f4b366840baefe6"}, - {file = "mypy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e54396d70be04b34f31d2edf3362c1edd023246c82f1730bbf8768c28db5361b"}, - {file = "mypy-1.9.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5e6061f44f2313b94f920e91b204ec600982961e07a17e0f6cd83371cb23f5c2"}, - {file = "mypy-1.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a10926e5473c5fc3da8abb04119a1f5811a236dc3a38d92015cb1e6ba4cb9e"}, - {file = "mypy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b685154e22e4e9199fc95f298661deea28aaede5ae16ccc8cbb1045e716b3e04"}, - {file = "mypy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:5d741d3fc7c4da608764073089e5f58ef6352bedc223ff58f2f038c2c4698a89"}, - {file = "mypy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:587ce887f75dd9700252a3abbc9c97bbe165a4a630597845c61279cf32dfbf02"}, - {file = "mypy-1.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f88566144752999351725ac623471661c9d1cd8caa0134ff98cceeea181789f4"}, - {file = "mypy-1.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61758fabd58ce4b0720ae1e2fea5cfd4431591d6d590b197775329264f86311d"}, - {file = "mypy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e49499be624dead83927e70c756970a0bc8240e9f769389cdf5714b0784ca6bf"}, - {file = "mypy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:571741dc4194b4f82d344b15e8837e8c5fcc462d66d076748142327626a1b6e9"}, - {file = "mypy-1.9.0-py3-none-any.whl", hash = "sha256:a260627a570559181a9ea5de61ac6297aa5af202f06fd7ab093ce74e7181e43e"}, - {file = "mypy-1.9.0.tar.gz", hash = "sha256:3cc5da0127e6a478cddd906068496a97a7618a21ce9b54bde5bf7e539c7af974"}, + {file = "mypy-1.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:da1cbf08fb3b851ab3b9523a884c232774008267b1f83371ace57f412fe308c2"}, + {file = "mypy-1.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:12b6bfc1b1a66095ab413160a6e520e1dc076a28f3e22f7fb25ba3b000b4ef99"}, + {file = "mypy-1.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e36fb078cce9904c7989b9693e41cb9711e0600139ce3970c6ef814b6ebc2b2"}, + {file = "mypy-1.10.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2b0695d605ddcd3eb2f736cd8b4e388288c21e7de85001e9f85df9187f2b50f9"}, + {file = "mypy-1.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:cd777b780312ddb135bceb9bc8722a73ec95e042f911cc279e2ec3c667076051"}, + {file = "mypy-1.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3be66771aa5c97602f382230165b856c231d1277c511c9a8dd058be4784472e1"}, + {file = "mypy-1.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8b2cbaca148d0754a54d44121b5825ae71868c7592a53b7292eeb0f3fdae95ee"}, + {file = 
"mypy-1.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ec404a7cbe9fc0e92cb0e67f55ce0c025014e26d33e54d9e506a0f2d07fe5de"}, + {file = "mypy-1.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e22e1527dc3d4aa94311d246b59e47f6455b8729f4968765ac1eacf9a4760bc7"}, + {file = "mypy-1.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:a87dbfa85971e8d59c9cc1fcf534efe664d8949e4c0b6b44e8ca548e746a8d53"}, + {file = "mypy-1.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a781f6ad4bab20eef8b65174a57e5203f4be627b46291f4589879bf4e257b97b"}, + {file = "mypy-1.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b808e12113505b97d9023b0b5e0c0705a90571c6feefc6f215c1df9381256e30"}, + {file = "mypy-1.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f55583b12156c399dce2df7d16f8a5095291354f1e839c252ec6c0611e86e2e"}, + {file = "mypy-1.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4cf18f9d0efa1b16478c4c129eabec36148032575391095f73cae2e722fcf9d5"}, + {file = "mypy-1.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:bc6ac273b23c6b82da3bb25f4136c4fd42665f17f2cd850771cb600bdd2ebeda"}, + {file = "mypy-1.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9fd50226364cd2737351c79807775136b0abe084433b55b2e29181a4c3c878c0"}, + {file = "mypy-1.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f90cff89eea89273727d8783fef5d4a934be2fdca11b47def50cf5d311aff727"}, + {file = "mypy-1.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fcfc70599efde5c67862a07a1aaf50e55bce629ace26bb19dc17cece5dd31ca4"}, + {file = "mypy-1.10.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:075cbf81f3e134eadaf247de187bd604748171d6b79736fa9b6c9685b4083061"}, + {file = "mypy-1.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:3f298531bca95ff615b6e9f2fc0333aae27fa48052903a0ac90215021cdcfa4f"}, + {file = "mypy-1.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fa7ef5244615a2523b56c034becde4e9e3f9b034854c93639adb667ec9ec2976"}, + {file = "mypy-1.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3236a4c8f535a0631f85f5fcdffba71c7feeef76a6002fcba7c1a8e57c8be1ec"}, + {file = "mypy-1.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a2b5cdbb5dd35aa08ea9114436e0d79aceb2f38e32c21684dcf8e24e1e92821"}, + {file = "mypy-1.10.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92f93b21c0fe73dc00abf91022234c79d793318b8a96faac147cd579c1671746"}, + {file = "mypy-1.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:28d0e038361b45f099cc086d9dd99c15ff14d0188f44ac883010e172ce86c38a"}, + {file = "mypy-1.10.0-py3-none-any.whl", hash = "sha256:f8c083976eb530019175aabadb60921e73b4f45736760826aa1689dda8208aee"}, + {file = "mypy-1.10.0.tar.gz", hash = "sha256:3d087fcbec056c4ee34974da493a826ce316947485cef3901f511848e687c131"}, ] [package.dependencies] @@ -1265,13 +1374,13 @@ files = [ [[package]] name = "myst-parser" -version = "3.0.0" +version = "3.0.1" description = "An extended [CommonMark](https://spec.commonmark.org/) compliant parser," optional = false python-versions = ">=3.8" files = [ - {file = "myst_parser-3.0.0-py3-none-any.whl", hash = "sha256:8ee926557b8e4c2940a1e62c5720e1667cfaf8480b94b1b9c77dc38e31d104aa"}, - {file = "myst_parser-3.0.0.tar.gz", hash = "sha256:0b4ae0b33a45800a748260cb40348c37089a8a456c35120609240bd1b32f9255"}, + {file = "myst_parser-3.0.1-py3-none-any.whl", hash = "sha256:6457aaa33a5d474aca678b8ead9b3dc298e89c68e67012e73146ea6fd54babf1"}, + {file = "myst_parser-3.0.1.tar.gz", hash = 
"sha256:88f0cb406cb363b077d176b51c476f62d60604d68a8dcdf4832e080441301a87"}, ] [package.dependencies] @@ -1367,6 +1476,40 @@ files = [ {file = "pbr-6.0.0.tar.gz", hash = "sha256:d1377122a5a00e2f940ee482999518efe16d745d423a670c27773dfbc3c9a7d9"}, ] +[[package]] +name = "pendulum" +version = "2.1.2" +description = "Python datetimes made easy" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "pendulum-2.1.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:b6c352f4bd32dff1ea7066bd31ad0f71f8d8100b9ff709fb343f3b86cee43efe"}, + {file = "pendulum-2.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:318f72f62e8e23cd6660dbafe1e346950281a9aed144b5c596b2ddabc1d19739"}, + {file = "pendulum-2.1.2-cp35-cp35m-macosx_10_15_x86_64.whl", hash = "sha256:0731f0c661a3cb779d398803655494893c9f581f6488048b3fb629c2342b5394"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:3481fad1dc3f6f6738bd575a951d3c15d4b4ce7c82dce37cf8ac1483fde6e8b0"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9702069c694306297ed362ce7e3c1ef8404ac8ede39f9b28b7c1a7ad8c3959e3"}, + {file = "pendulum-2.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:fb53ffa0085002ddd43b6ca61a7b34f2d4d7c3ed66f931fe599e1a531b42af9b"}, + {file = "pendulum-2.1.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c501749fdd3d6f9e726086bf0cd4437281ed47e7bca132ddb522f86a1645d360"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c807a578a532eeb226150d5006f156632df2cc8c5693d778324b43ff8c515dd0"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2d1619a721df661e506eff8db8614016f0720ac171fe80dda1333ee44e684087"}, + {file = "pendulum-2.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f888f2d2909a414680a29ae74d0592758f2b9fcdee3549887779cd4055e975db"}, + {file = "pendulum-2.1.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e95d329384717c7bf627bf27e204bc3b15c8238fa8d9d9781d93712776c14002"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4c9c689747f39d0d02a9f94fcee737b34a5773803a64a5fdb046ee9cac7442c5"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1245cd0075a3c6d889f581f6325dd8404aca5884dea7223a5566c38aab94642b"}, + {file = "pendulum-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:db0a40d8bcd27b4fb46676e8eb3c732c67a5a5e6bfab8927028224fbced0b40b"}, + {file = "pendulum-2.1.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f5e236e7730cab1644e1b87aca3d2ff3e375a608542e90fe25685dae46310116"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:de42ea3e2943171a9e95141f2eecf972480636e8e484ccffaf1e833929e9e052"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7c5ec650cb4bec4c63a89a0242cc8c3cebcec92fcfe937c417ba18277d8560be"}, + {file = "pendulum-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33fb61601083f3eb1d15edeb45274f73c63b3c44a8524703dc143f4212bf3269"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:29c40a6f2942376185728c9a0347d7c0f07905638c83007e1d262781f1e6953a"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:94b1fc947bfe38579b28e1cccb36f7e28a15e841f30384b5ad6c5e31055c85d7"}, + {file = "pendulum-2.1.2.tar.gz", hash = "sha256:b06a0ca1bfe41c990bbf0c029f0b6501a7f2ec4e38bfec730712015e8860f207"}, +] + +[package.dependencies] +python-dateutil = ">=2.6,<3.0" +pytzdata = ">=2020.1" + [[package]] name = "pep8-naming" version = "0.13.3" @@ -1564,13 +1707,13 
@@ windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pytest" -version = "8.1.1" +version = "8.2.0" description = "pytest: simple powerful testing with Python" optional = true python-versions = ">=3.8" files = [ - {file = "pytest-8.1.1-py3-none-any.whl", hash = "sha256:2a8386cfc11fa9d2c50ee7b2a57e7d898ef90470a7a34c4b949ff59662bb78b7"}, - {file = "pytest-8.1.1.tar.gz", hash = "sha256:ac978141a75948948817d360297b7aae0fcb9d6ff6bc9ec6d514b85d5a65c044"}, + {file = "pytest-8.2.0-py3-none-any.whl", hash = "sha256:1733f0620f6cda4095bbf0d9ff8022486e91892245bb9e7d5542c018f612f233"}, + {file = "pytest-8.2.0.tar.gz", hash = "sha256:d507d4482197eac0ba2bae2e9babf0672eb333017bcedaa5fb1a3d42c1174b3f"}, ] [package.dependencies] @@ -1578,11 +1721,11 @@ colorama = {version = "*", markers = "sys_platform == \"win32\""} exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} iniconfig = "*" packaging = "*" -pluggy = ">=1.4,<2.0" +pluggy = ">=1.5,<2.0" tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] -testing = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] [[package]] name = "pytest-asyncio" @@ -1602,6 +1745,20 @@ pytest = ">=7.0.0,<9" docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + [[package]] name = "python-dotenv" version = "1.0.1" @@ -1616,6 +1773,28 @@ files = [ [package.extras] cli = ["click (>=5.0)"] +[[package]] +name = "pytz" +version = "2024.1" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +files = [ + {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, + {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, +] + +[[package]] +name = "pytzdata" +version = "2020.1" +description = "The Olson timezone database for Python." 
+optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pytzdata-2020.1-py2.py3-none-any.whl", hash = "sha256:e1e14750bcf95016381e4d472bad004eef710f2d6417240904070b3d6654485f"}, + {file = "pytzdata-2020.1.tar.gz", hash = "sha256:3efa13b335a00a8de1d345ae41ec78dd11c9f8807f522d39850f2dd828681540"}, +] + [[package]] name = "pyupgrade" version = "3.15.2" @@ -1857,6 +2036,17 @@ files = [ {file = "shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de"}, ] +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + [[package]] name = "sniffio" version = "1.3.1" @@ -2085,6 +2275,85 @@ lint = ["docutils-stubs", "flake8", "mypy"] standalone = ["Sphinx (>=5)"] test = ["pytest"] +[[package]] +name = "sqlalchemy" +version = "1.4.52" +description = "Database Abstraction Library" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +files = [ + {file = "SQLAlchemy-1.4.52-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:f68016f9a5713684c1507cc37133c28035f29925c75c0df2f9d0f7571e23720a"}, + {file = "SQLAlchemy-1.4.52-cp310-cp310-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24bb0f81fbbb13d737b7f76d1821ec0b117ce8cbb8ee5e8641ad2de41aa916d3"}, + {file = "SQLAlchemy-1.4.52-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e93983cc0d2edae253b3f2141b0a3fb07e41c76cd79c2ad743fc27eb79c3f6db"}, + {file = "SQLAlchemy-1.4.52-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:84e10772cfc333eb08d0b7ef808cd76e4a9a30a725fb62a0495877a57ee41d81"}, + {file = "SQLAlchemy-1.4.52-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:427988398d2902de042093d17f2b9619a5ebc605bf6372f7d70e29bde6736842"}, + {file = "SQLAlchemy-1.4.52-cp310-cp310-win32.whl", hash = "sha256:1296f2cdd6db09b98ceb3c93025f0da4835303b8ac46c15c2136e27ee4d18d94"}, + {file = "SQLAlchemy-1.4.52-cp310-cp310-win_amd64.whl", hash = "sha256:80e7f697bccc56ac6eac9e2df5c98b47de57e7006d2e46e1a3c17c546254f6ef"}, + {file = "SQLAlchemy-1.4.52-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2f251af4c75a675ea42766880ff430ac33291c8d0057acca79710f9e5a77383d"}, + {file = "SQLAlchemy-1.4.52-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb8f9e4c4718f111d7b530c4e6fb4d28f9f110eb82e7961412955b3875b66de0"}, + {file = "SQLAlchemy-1.4.52-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afb1672b57f58c0318ad2cff80b384e816735ffc7e848d8aa51e0b0fc2f4b7bb"}, + {file = "SQLAlchemy-1.4.52-cp311-cp311-win32.whl", hash = "sha256:6e41cb5cda641f3754568d2ed8962f772a7f2b59403b95c60c89f3e0bd25f15e"}, + {file = "SQLAlchemy-1.4.52-cp311-cp311-win_amd64.whl", hash = "sha256:5bed4f8c3b69779de9d99eb03fd9ab67a850d74ab0243d1be9d4080e77b6af12"}, + {file = "SQLAlchemy-1.4.52-cp312-cp312-macosx_10_9_universal2.whl", hash = 
"sha256:49e3772eb3380ac88d35495843daf3c03f094b713e66c7d017e322144a5c6b7c"}, + {file = "SQLAlchemy-1.4.52-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:618827c1a1c243d2540314c6e100aee7af09a709bd005bae971686fab6723554"}, + {file = "SQLAlchemy-1.4.52-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de9acf369aaadb71a725b7e83a5ef40ca3de1cf4cdc93fa847df6b12d3cd924b"}, + {file = "SQLAlchemy-1.4.52-cp312-cp312-win32.whl", hash = "sha256:763bd97c4ebc74136ecf3526b34808c58945023a59927b416acebcd68d1fc126"}, + {file = "SQLAlchemy-1.4.52-cp312-cp312-win_amd64.whl", hash = "sha256:f12aaf94f4d9679ca475975578739e12cc5b461172e04d66f7a3c39dd14ffc64"}, + {file = "SQLAlchemy-1.4.52-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:853fcfd1f54224ea7aabcf34b227d2b64a08cbac116ecf376907968b29b8e763"}, + {file = "SQLAlchemy-1.4.52-cp36-cp36m-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f98dbb8fcc6d1c03ae8ec735d3c62110949a3b8bc6e215053aa27096857afb45"}, + {file = "SQLAlchemy-1.4.52-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e135fff2e84103bc15c07edd8569612ce317d64bdb391f49ce57124a73f45c5"}, + {file = "SQLAlchemy-1.4.52-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5b5de6af8852500d01398f5047d62ca3431d1e29a331d0b56c3e14cb03f8094c"}, + {file = "SQLAlchemy-1.4.52-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3491c85df263a5c2157c594f54a1a9c72265b75d3777e61ee13c556d9e43ffc9"}, + {file = "SQLAlchemy-1.4.52-cp36-cp36m-win32.whl", hash = "sha256:427c282dd0deba1f07bcbf499cbcc9fe9a626743f5d4989bfdfd3ed3513003dd"}, + {file = "SQLAlchemy-1.4.52-cp36-cp36m-win_amd64.whl", hash = "sha256:ca5ce82b11731492204cff8845c5e8ca1a4bd1ade85e3b8fcf86e7601bfc6a39"}, + {file = "SQLAlchemy-1.4.52-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:29d4247313abb2015f8979137fe65f4eaceead5247d39603cc4b4a610936cd2b"}, + {file = "SQLAlchemy-1.4.52-cp37-cp37m-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a752bff4796bf22803d052d4841ebc3c55c26fb65551f2c96e90ac7c62be763a"}, + {file = "SQLAlchemy-1.4.52-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7ea11727feb2861deaa293c7971a4df57ef1c90e42cb53f0da40c3468388000"}, + {file = "SQLAlchemy-1.4.52-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d913f8953e098ca931ad7f58797f91deed26b435ec3756478b75c608aa80d139"}, + {file = "SQLAlchemy-1.4.52-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a251146b921725547ea1735b060a11e1be705017b568c9f8067ca61e6ef85f20"}, + {file = "SQLAlchemy-1.4.52-cp37-cp37m-win32.whl", hash = "sha256:1f8e1c6a6b7f8e9407ad9afc0ea41c1f65225ce505b79bc0342159de9c890782"}, + {file = "SQLAlchemy-1.4.52-cp37-cp37m-win_amd64.whl", hash = "sha256:346ed50cb2c30f5d7a03d888e25744154ceac6f0e6e1ab3bc7b5b77138d37710"}, + {file = "SQLAlchemy-1.4.52-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:4dae6001457d4497736e3bc422165f107ecdd70b0d651fab7f731276e8b9e12d"}, + {file = 
"SQLAlchemy-1.4.52-cp38-cp38-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5d2e08d79f5bf250afb4a61426b41026e448da446b55e4770c2afdc1e200fce"}, + {file = "SQLAlchemy-1.4.52-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bbce5dd7c7735e01d24f5a60177f3e589078f83c8a29e124a6521b76d825b85"}, + {file = "SQLAlchemy-1.4.52-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:bdb7b4d889631a3b2a81a3347c4c3f031812eb4adeaa3ee4e6b0d028ad1852b5"}, + {file = "SQLAlchemy-1.4.52-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c294ae4e6bbd060dd79e2bd5bba8b6274d08ffd65b58d106394cb6abbf35cf45"}, + {file = "SQLAlchemy-1.4.52-cp38-cp38-win32.whl", hash = "sha256:bcdfb4b47fe04967669874fb1ce782a006756fdbebe7263f6a000e1db969120e"}, + {file = "SQLAlchemy-1.4.52-cp38-cp38-win_amd64.whl", hash = "sha256:7d0dbc56cb6af5088f3658982d3d8c1d6a82691f31f7b0da682c7b98fa914e91"}, + {file = "SQLAlchemy-1.4.52-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:a551d5f3dc63f096ed41775ceec72fdf91462bb95abdc179010dc95a93957800"}, + {file = "SQLAlchemy-1.4.52-cp39-cp39-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ab773f9ad848118df7a9bbabca53e3f1002387cdbb6ee81693db808b82aaab0"}, + {file = "SQLAlchemy-1.4.52-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2de46f5d5396d5331127cfa71f837cca945f9a2b04f7cb5a01949cf676db7d1"}, + {file = "SQLAlchemy-1.4.52-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7027be7930a90d18a386b25ee8af30514c61f3852c7268899f23fdfbd3107181"}, + {file = "SQLAlchemy-1.4.52-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99224d621affbb3c1a4f72b631f8393045f4ce647dd3262f12fe3576918f8bf3"}, + {file = "SQLAlchemy-1.4.52-cp39-cp39-win32.whl", hash = "sha256:c124912fd4e1bb9d1e7dc193ed482a9f812769cb1e69363ab68e01801e859821"}, + {file = "SQLAlchemy-1.4.52-cp39-cp39-win_amd64.whl", hash = "sha256:2c286fab42e49db23c46ab02479f328b8bdb837d3e281cae546cc4085c83b680"}, + {file = "SQLAlchemy-1.4.52.tar.gz", hash = "sha256:80e63bbdc5217dad3485059bdf6f65a7d43f33c8bde619df5c220edf03d87296"}, +] + +[package.dependencies] +greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} + +[package.extras] +aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] +asyncio = ["greenlet (!=0.4.17)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4)", "greenlet (!=0.4.17)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)"] +mssql = ["pyodbc"] +mssql-pymssql = ["pymssql"] +mssql-pyodbc = ["pyodbc"] +mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"] +mysql = ["mysqlclient (>=1.4.0)", "mysqlclient (>=1.4.0,<2)"] +mysql-connector = ["mysql-connector-python"] +oracle = ["cx_oracle (>=7)", "cx_oracle (>=7,<8)"] +postgresql = ["psycopg2 (>=2.7)"] +postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] +postgresql-pg8000 = ["pg8000 
(>=1.16.6,!=1.29.0)"] +postgresql-psycopg2binary = ["psycopg2-binary"] +postgresql-psycopg2cffi = ["psycopg2cffi"] +pymysql = ["pymysql", "pymysql (<1)"] +sqlcipher = ["sqlcipher3_binary"] + [[package]] name = "starlette" version = "0.37.2" @@ -2118,13 +2387,13 @@ pbr = ">=2.0.0,<2.1.0 || >2.1.0" [[package]] name = "textual" -version = "0.57.1" +version = "0.58.1" description = "Modern Text User Interface framework" optional = true python-versions = "<4.0,>=3.8" files = [ - {file = "textual-0.57.1-py3-none-any.whl", hash = "sha256:7a0a660525b207dd2ef5d95b7b9585f13d59ec0112de4c8f0a8dd0ffb2b9a6c4"}, - {file = "textual-0.57.1.tar.gz", hash = "sha256:91029212b28c0cc73adad1e8b9fcda0b53947d4d81a1e8df4efbcb1346638744"}, + {file = "textual-0.58.1-py3-none-any.whl", hash = "sha256:9902ebb4b00481f6fdb0e7db821c007afa45797d81e1d0651735a07de25ece87"}, + {file = "textual-0.58.1.tar.gz", hash = "sha256:3a01be0b583f2bce38b8e9786b75ed33dddc816bba502d8e7a9ca3ca2ead3957"}, ] [package.dependencies] @@ -2277,13 +2546,13 @@ standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", [[package]] name = "virtualenv" -version = "20.26.0" +version = "20.26.1" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.26.0-py3-none-any.whl", hash = "sha256:0846377ea76e818daaa3e00a4365c018bc3ac9760cbb3544de542885aad61fb3"}, - {file = "virtualenv-20.26.0.tar.gz", hash = "sha256:ec25a9671a5102c8d2657f62792a27b48f016664c6873f6beed3800008577210"}, + {file = "virtualenv-20.26.1-py3-none-any.whl", hash = "sha256:7aa9982a728ae5892558bff6a2839c00b9ed145523ece2274fad6f414690ae75"}, + {file = "virtualenv-20.26.1.tar.gz", hash = "sha256:604bfdceaeece392802e6ae48e69cec49168b9c5f4a44e483963f9242eb0e78b"}, ] [package.dependencies] @@ -2621,4 +2890,4 @@ tests = ["gridworks-cert", "pytest", "pytest-asyncio"] [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "244425173885d8e95f23e2cc32c45d4097cc3367f6e9202834f39b91f63b3919" +content-hash = "db7633f6f85460f5ac9733fe2aee4cefe7c34e27e24ab178104ba635b597a3b5" diff --git a/pyproject.toml b/pyproject.toml index b63d031..c33e575 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "gridworks-proactor" -version = "0.4.3" +version = "0.4.4" description = "Gridworks Proactor" authors = ["Jessica Millar "] license = "MIT" @@ -35,6 +35,7 @@ gridworks-cert = {version = ">=0.4.2", optional = true} aiohttp = "^3.8.5" yarl = "^1.9.2" multidict = "^6.0.4" +pendulum = "2.1.2" [tool.poetry.dev-dependencies] Pygments = ">=2.10.0" diff --git a/src/gwproactor/links/link_manager.py b/src/gwproactor/links/link_manager.py index 492c54b..8c008a4 100644 --- a/src/gwproactor/links/link_manager.py +++ b/src/gwproactor/links/link_manager.py @@ -50,6 +50,9 @@ from gwproactor.message import MQTTDisconnectPayload from gwproactor.message import MQTTReceiptPayload from gwproactor.message import MQTTSubackPayload +from gwproactor.persister import ByteDecodingError +from gwproactor.persister import DecodingError +from gwproactor.persister import FileEmptyWarning from gwproactor.persister import JSONDecodingError from gwproactor.persister import PersisterInterface from gwproactor.persister import UIDMissingWarning @@ -92,7 +95,6 @@ def __init__( self._stats = stats self._event_persister = event_persister self._reuploads = Reuploads( - self._event_persister, self._logger, self._settings.num_initial_event_reuploads, ) @@ -208,6 +210,9 @@ def log_subscriptions(self, tag=""): s 
+= f"\t\t[{subscription}]\n" self._logger.lifecycle(s) + def get_reuploads_str(self, verbose: bool = True, num_events: int = 5) -> str: + return self._reuploads.get_str(verbose=verbose, num_events=num_events) + def publish_message( self, client, message: Message, qos: int = 0, context: Any = None ) -> MQTTMessageInfo: @@ -253,61 +258,140 @@ def generate_event(self, event: EventT) -> Result[bool, BaseException]: ) def _start_reupload(self) -> None: - self._logger.path("++_start_reupload reuploading: %s", self.reuploading()) - path_dbg = 0 if not self._reuploads.reuploading(): + self._continue_reupload( + self._reuploads.start_reupload(self._event_persister.pending()) + ) + + def _continue_reupload(self, event_ids: list[str]) -> None: + self._logger.path("++_continue_reupload %d", len(event_ids)) + path_dbg = 0 + tried_count_dbg = 0 + sent_count_dbg = 0 + continuation_count_dbg = -1 + + if event_ids: path_dbg |= 0x00000001 - events_to_reupload = self._reuploads.start_reupload() - self._reupload_events(events_to_reupload) - if self._logger.isEnabledFor(logging.INFO): - path_dbg |= 0x00000002 - if self._reuploads.reuploading(): - path_dbg |= 0x00000004 - state_str = f"{self._reuploads.num_reupload_pending} reupload events pending." - else: - path_dbg |= 0x00000008 - state_str = "reupload complete." - self._logger.info( - f"_start_reupload: reuploaded {len(events_to_reupload)} events. " - f"{state_str} " - f"Total pending events: {self._event_persister.num_pending}." - ) + sent_one = False + # Try to send all requested events. At least one send must succeed to + # continue the reupload, so if all sends fail, get more until + # one is sent or there are no more reuploads. + while not sent_one and self._reuploads.reuploading() and event_ids: + continuation_path_dbg = 0x00000002 + continuation_count_dbg += 1 + next_event_ids = [] + for event_id in event_ids: + event_path_dbg = 0x00000004 + tried_count_dbg += 1 + problems = Problems() + ret = self._reupload_event(event_id) + if ret.is_ok(): + event_path_dbg |= 0x00000008 + if ret.value: + path_dbg |= 0x00000010 + sent_count_dbg += 1 + sent_one = True + else: + event_path_dbg |= 0x00000020 + problems.add_error(DecodingError(uid=event_id)) + else: + event_path_dbg |= 0x00000040 + problems.add_problems(ret.err()) + if problems: + event_path_dbg |= 0x00000080 + # There was some error decoding this event. + # We generate a new event with information + # about decoding failure and delete this event.
+ self.generate_event( + problems.problem_event( + f"Event decoding error - uid:{event_id}" + ) + ) + self._event_persister.clear(event_id) + if sent_one: + event_path_dbg |= 0x00000100 + self._reuploads.clear_unacked_event(event_id) + else: + event_path_dbg |= 0x00000200 + next_event_ids.extend( + self._reuploads.process_ack_for_reupload(event_id) + ) + self._logger.path(" 1 event path:0x%08X", event_path_dbg) + continuation_path_dbg |= event_path_dbg + self._logger.path(" 1 continuation path:0x%08X", continuation_path_dbg) + event_ids = next_event_ids + path_dbg |= continuation_path_dbg self._logger.path( - "--_start_reupload reuploading: %s path:0x%08X", - self.reuploading(), + "--_continue_reupload path:0x%08X sent:%d tried:%d continuations:%d", path_dbg, + sent_count_dbg, + tried_count_dbg, + continuation_count_dbg, ) - def _reupload_events(self, event_ids: list[str]) -> Result[bool, BaseException]: - errors = [] - for message_id in event_ids: - match self._event_persister.retrieve(message_id): - case Ok(event_bytes): - if event_bytes is None: - errors.append( - UIDMissingWarning("reupload_events", uid=message_id) + def _reupload_event(self, event_id) -> Result[bool, Problems]: + """Load the event for event_id from storage, decode it to JSON, and send it. + + Return either Ok(True) or Err(Problems(list of decoding errors)). + + Send errors are handled either by exception, which will propagate up, or + by ack timeout. + """ + self._logger.path("++_reupload_event %s", event_id) + path_dbg = 0 + problems = Problems() + match self._event_persister.retrieve(event_id): + case Ok(event_bytes): + path_dbg |= 0x00000001 + if event_bytes is None: + path_dbg |= 0x00000002 + problems.add_error( + UIDMissingWarning("reupload_events", uid=event_id) + ) + elif len(event_bytes) == 0: + path_dbg |= 0x00000004 + problems.add_error( + FileEmptyWarning("reupload_events", uid=event_id) + ) + else: + path_dbg |= 0x00000008 + try: + event_str = event_bytes.decode(encoding=self.PERSISTER_ENCODING) + except BaseException as e: + path_dbg |= 0x00000010 + problems.add_error(e).add_error( + ByteDecodingError("reupload_events", uid=event_id) ) else: + path_dbg |= 0x00000020 try: - event = json.loads( - event_bytes.decode(encoding=self.PERSISTER_ENCODING) - ) + event = json.loads(event_str) except BaseException as e: - errors.append(e) - errors.append( - JSONDecodingError("reupload_events", uid=message_id) + path_dbg |= 0x00000040 + problems.add_error(e).add_error( + JSONDecodingError( + f"reupload_events - raw json:\n<\n{event_str}\n>", + uid=event_id, + ) ) else: + path_dbg |= 0x00000080 self.publish_upstream(event, AckRequired=True) - case Err(error): - errors.append(error) - if errors: - return Err(Problems(errors=errors)) - return Ok() + self._logger.path( + "--_reupload_event:1 path:0x%08X", path_dbg + ) + return Ok(True) + case Err(error): + path_dbg |= 0x00000100 + problems.add_problems(error) + self._logger.path("--_reupload_event:0 path:0x%08X", path_dbg) + return Err(problems) def start( self, loop: asyncio.AbstractEventLoop, async_queue: asyncio.Queue ) -> None: + if self.upstream_client: + self._reuploads.stats = self._stats.link(self.upstream_client) self._mqtt_clients.start(loop, async_queue) self.generate_event(StartupEvent()) self._states.start_all() @@ -410,17 +494,11 @@ def process_ack(self, link_name: str, message_id: str): self._event_persister.clear(message_id) if self._reuploads.reuploading() and link_name == self.upstream_client: path_dbg |= 0x00000002 - reupload_now = 
self._reuploads.process_ack_for_reupload(message_id) - if reupload_now: - path_dbg |= 0x00000004 - self._reupload_events(reupload_now) - self._logger.path( - "events pending: %d reupload pending: %d", - self._event_persister.num_pending, - self._reuploads.num_reupload_pending, + self._continue_reupload( + self._reuploads.process_ack_for_reupload(message_id) ) if not self._reuploads.reuploading(): - path_dbg |= 0x00000008 + path_dbg |= 0x00000004 self._logger.info("reupload complete.") self._logger.path("--LinkManager.process_ack path:0x%08X", path_dbg) diff --git a/src/gwproactor/links/reuploads.py b/src/gwproactor/links/reuploads.py index 1321888..ef31b48 100644 --- a/src/gwproactor/links/reuploads.py +++ b/src/gwproactor/links/reuploads.py @@ -1,65 +1,159 @@ +"""A 'reupload' is a process of uploading events that have not yet been acked when communication with the upstream +peer is re-activated. The re-upload proceeds gradually, paced by received acks, so as not to overwhelm processing queues. Events +that occur after communication has been reactivated and/or during a re-upload are not part of the reupload, but are sent +as they occur. + +This module provides a class, Reuploads, for tracking which events are part of the reupload. + +This module only manages event ids; it does not change (add or remove) event storage. +""" + +from typing import Optional + from gwproactor.logger import ProactorLogger -from gwproactor.persister import PersisterInterface +from gwproactor.stats import LinkStats + + +class _ReuploadDiffLogger: # pragma: no cover + """Helper class for logging the results of an ack without too much logging code bulk in the ack-processing routine""" + + reuploads: Optional["Reuploads"] = None + event_id: str = "" + verbose: bool = False + begin_reuploading: bool = False + begin_num_unacked: int = -1 + begin_num_pending: int = -1 + begin_verbose_str: str = "" + + def init( + self, + reuploads: Optional["Reuploads"] = None, + verbose: bool = False, + ): + self.reuploads = reuploads + self.verbose = verbose + if reuploads is not None: + self.begin_reuploading = reuploads.reuploading() + self.begin_num_unacked = reuploads.num_reuploaded_unacked + self.begin_num_pending = reuploads.num_reupload_pending + if verbose: + self.begin_verbose_str = reuploads.get_str(num_events=100) + + def diff_str(self, path_dbg: int) -> str: + s = "" + if self.reuploads is not None: + if self.verbose: + s += f"Begin reuploads:\n{self.begin_verbose_str}\n" + s += f"End reuploads:\n{self.reuploads.get_str(num_events=100)}\n" + s += ( + f"path:0x{path_dbg:08X} " + f"reuploading: {int(self.begin_reuploading)} -> {int(self.reuploads.reuploading())} " + f"unacked: {self.begin_num_unacked} -> {self.reuploads.num_reuploaded_unacked} " + f"pending: {self.begin_num_pending} -> {self.reuploads.num_reupload_pending} " + ) + return s + + def ack_str(self, path_dbg: int) -> str: + return f"--process_ack_for_reupload {self.diff_str(path_dbg)}" + + def log_ack(self, path_dbg: int) -> None: + if self.reuploads is not None and self.reuploads._logger.path_enabled: # noqa + self.reuploads._logger.path(self.ack_str(path_dbg)) # noqa class Reuploads: + """Track event uids that are part of a re-upload, both those that have not yet been sent (pending) and those that are + "in-flight" (unacked). Upon ack, update records and return the next message to be sent. 
+ """ + NUM_INITIAL_EVENTS: int = 5 + """Default number of events to send when re-upload starts.""" - _event_persister: PersisterInterface _reupload_pending: dict[str, None] + """'Ordered set' of *unsent* events that are part of this reupload. + A dict is used to provide insertion order and fast lookup.""" + _reuploaded_unacked: dict[str, None] + """'Ordered set' of *sent but as-yet unacked* + A dict is used to provide insertion order and fast lookup.""" + _num_initial_events: int + """Number of events to send when re-upload starts.""" + + stats: Optional[LinkStats] = None + """Object into which we can record reupload start and complete. Set during + LinkManager.start() since upstream client does not exist durin LinkManager + construction. """ + _logger: ProactorLogger def __init__( self, - event_persister: PersisterInterface, logger: ProactorLogger, num_initial_events: int = NUM_INITIAL_EVENTS, ): - self._event_persister = event_persister self._reupload_pending = dict() self._reuploaded_unacked = dict() self._num_initial_events = num_initial_events self._logger = logger - def __str__(self): - s = f"Reuploads: {len(self._reupload_pending)}" - for message_id in self._reupload_pending: - s += f"\n {message_id}" - return s - - def start_reupload(self) -> list[str]: - pending_events = self._event_persister.pending() + def start_reupload(self, pending_events: list[str]) -> list[str]: + """Track all pending_events for reupload. Of the pending_events, + record the first _num_initial_events as "unacked" and the rest + as "pending". Return the "unacked" group so they can be sent. + """ reupload_now = pending_events[: self._num_initial_events] self._reuploaded_unacked = dict.fromkeys(reupload_now) self._reupload_pending = dict.fromkeys( pending_events[self._num_initial_events :] ) + if self.reuploading(): + self.stats.start_reupload() + self._log_start_reupload(len(pending_events), len(reupload_now)) return reupload_now - def process_ack_for_reupload(self, message_id: str) -> list[str]: + def clear_unacked_event(self, ack_id: str) -> None: + self._reuploaded_unacked.pop(ack_id) + + def process_ack_for_reupload(self, ack_id: str) -> list[str]: + """If ack_id is in our "unacked" store, remove it from the unacked store. 
If any events remain in our "pending" + store, move the first pending event from the pending store to the unacked store and return it for sending + next.""" + + path_dbg = 0 + was_reuploading = self.reuploading() + ack_logger = _ReuploadDiffLogger() + if self._logger.path_enabled: + ack_logger.init(self, verbose=True) reupload_now = [] - # self._logger.path( - # f"++process_ack_for_reupload reuploading:{self.reuploading()} num_reupload_pending: {self.num_reupload_pending}" - # ) - # old_num_dbg = self.num_reupload_pending - # path_dbg = 0 - if message_id in self._reuploaded_unacked: - # path_dbg |= 0x00000001 - self._reuploaded_unacked.pop(message_id) + if ack_id in self._reuploaded_unacked: + path_dbg |= 0x00000001 + self._reuploaded_unacked.pop(ack_id) if self._reupload_pending: - # path_dbg |= 0x00000002 + path_dbg |= 0x00000002 reupload_next = next(iter(self._reupload_pending)) self._reupload_pending.pop(reupload_next) self._reuploaded_unacked[reupload_next] = None reupload_now = [reupload_next] - # self._logger.path( - # f"--process_ack_for_reupload path:0x{path_dbg:08X} " - # f"reuploading:{self.reuploading()} " - # f"num_reupload_pending: {old_num_dbg} -> {self.num_reupload_pending} " - # f"num reupload_now: {len(reupload_now)}" - # ) + # This case is likely in testing (which explicitly generates + # the awaiting_setup state), but unlikely in the real world, since + # unless we have many subscriptions we will get one suback for all of + # them, meaning we can't enter the awaiting_setup state. + # In awaiting_setup, this case is likely since we generate an event for + # the suback before we start the reupload - we generate the event, send it, + # then add it to the reupload. Because old events are sent first in the reupload, + # the suback event is likely to be in pending, not in unacked. + # In theory this could also happen if an ack for an event sent prior to + # communication loss was somehow preserved in a queue and delivered after comm + # restore. + elif ack_id in self._reupload_pending: + path_dbg |= 0x00000004 + self._reupload_pending.pop(ack_id) + if was_reuploading and not self.reuploading(): + path_dbg |= 0x00000008 + self.stats.complete_reupload() + if self._logger.path_enabled: + ack_logger.log_ack(path_dbg) return reupload_now @property @@ -71,8 +165,39 @@ def num_reuploaded_unacked(self) -> int: return len(self._reuploaded_unacked) def reuploading(self) -> bool: - return bool(self._reuploaded_unacked) + return bool(len(self._reuploaded_unacked) + len(self._reupload_pending)) def clear(self) -> None: self._reupload_pending.clear() self._reuploaded_unacked.clear() + + def get_str(self, verbose: bool = True, num_events: int = 5) -> str: + s = f"Reuploads reuploading:{int(self.reuploading())} unacked/sent:{len(self._reuploaded_unacked)} pending/unsent:{len(self._reupload_pending)}" + if verbose: + s += f" num initial:{self._num_initial_events}\n" + s += f" unacked:{len(self._reuploaded_unacked)}\n" + for message_id in self._reuploaded_unacked: + s += f" {message_id[:8]}...\n" + s += f" pending:{len(self._reupload_pending)}\n" + for i, message_id in enumerate(self._reupload_pending): + s += f" {message_id[:8]}...\n" + if i == num_events - 1: + break + return s.rstrip() + + def __str__(self): + return self.get_str(verbose=False) + + def _log_start_reupload(self, num_pending_events, num_reupload_now): + if self._logger.general_enabled: + if self.reuploading(): + state_str = f"{self.num_reupload_pending} reupload events pending." + else: + state_str = "reupload complete." 
+ self._logger.info( + f"start_reupload: sent {num_reupload_now} events. " + f"{state_str} " + f"Total events in reupload: {num_pending_events}." + ) + if self._logger.path_enabled: + self._logger.path(self.get_str(num_events=100)) diff --git a/src/gwproactor/persister.py b/src/gwproactor/persister.py index d2fbac3..fafcfa0 100644 --- a/src/gwproactor/persister.py +++ b/src/gwproactor/persister.py @@ -59,7 +59,13 @@ class TrimFailed(PersisterError): ... class ReindexError(PersisterError): ... -class JSONDecodingError(PersisterException): ... +class DecodingError(PersisterError): ... + + +class ByteDecodingError(DecodingError): ... + + +class JSONDecodingError(DecodingError): ... class UIDExistedWarning(PersisterWarning): ... @@ -74,6 +80,9 @@ class FileMissingWarning(PersisterWarning): ... class UIDMissingWarning(PersisterWarning): ... +class FileEmptyWarning(PersisterWarning): ... + + class PersisterInterface(abc.ABC): @abstractmethod def persist(self, uid: str, content: bytes) -> Result[bool, Problems]: @@ -197,9 +206,10 @@ def __init__( self._base_dir = Path(base_dir).resolve() self._max_bytes = max_bytes self._curr_dir = self._today_dir() + self._curr_bytes = 0 self._pat_watchdog_args = pat_watchdog_args self._reindex_pat_seconds = reindex_pat_seconds - self.reindex() + self._pending = dict() @property def max_bytes(self) -> int: diff --git a/src/gwproactor/proactor_implementation.py b/src/gwproactor/proactor_implementation.py index cc55c97..3054147 100644 --- a/src/gwproactor/proactor_implementation.py +++ b/src/gwproactor/proactor_implementation.py @@ -76,6 +76,7 @@ class Proactor(ServicesInterface, Runnable): _logger: ProactorLogger _stats: ProactorStats _event_persister: PersisterInterface + _reindex_problems: Optional[Problems] = None _loop: Optional[asyncio.AbstractEventLoop] = None _receive_queue: Optional[asyncio.Queue] = None _links: LinkManager @@ -113,6 +114,9 @@ def __init__( self._logger = ProactorLogger(**settings.logging.qualified_logger_names()) self._stats = self.make_stats() self._event_persister = self.make_event_persister(settings) + reindex_result = self._event_persister.reindex() + if reindex_result.is_err(): + self._reindex_problems = reindex_result.err() self._links = LinkManager( publication_name=self.publication_name, settings=settings, @@ -621,6 +625,11 @@ async def run_forever(self): self._loop = asyncio.get_running_loop() self._receive_queue = asyncio.Queue() self._links.start(self._loop, self._receive_queue) + if self._reindex_problems is not None: + self.generate_event( + self._reindex_problems.problem_event("Startup event reindex() problems") + ) + self._reindex_problems = None for communicator in self._communicators.values(): if isinstance(communicator, Runnable): communicator.start() diff --git a/src/gwproactor/stats.py b/src/gwproactor/stats.py index 1008980..7103e23 100644 --- a/src/gwproactor/stats.py +++ b/src/gwproactor/stats.py @@ -9,6 +9,18 @@ from gwproactor.message import MQTTReceiptPayload +@dataclass +class ReuploadCounts: + started: int = 0 + completed: int = 0 + + def start(self): + self.started += 1 + + def complete(self): + self.completed += 1 + + @dataclass class LinkStats: name: str @@ -19,8 +31,15 @@ class LinkStats: default_factory=lambda: defaultdict(int) ) comm_event_counts: dict[str, int] = field(default_factory=lambda: defaultdict(int)) + reupload_counts: ReuploadCounts = field(default_factory=ReuploadCounts) timeouts: int = 0 + def start_reupload(self): + self.reupload_counts.start() + + def complete_reupload(self): + 
self.reupload_counts.complete() + @property def num_received(self) -> int: return self.num_received_by_type[Message.type_name()] @@ -39,12 +58,15 @@ def __str__(self) -> str: s += "\n Comm event counts:" for comm_event in self.comm_event_counts: s += f"\n {self.comm_event_counts[comm_event]:3d}: [{comm_event}]" + s += f"\n {self.reupload_counts.started:3d}: [reuploads_started]" + s += f"\n {self.reupload_counts.completed:3d}: [reuploads_completed]" return s class ProactorStats: num_received_by_type: dict[str, int] num_received_by_topic: dict[str, int] + num_events_received: int = 0 links: dict[str, LinkStats] def __init__(self, link_names: Optional[Sequence[str]] = None): @@ -65,6 +87,8 @@ def add_mqtt_message(self, message: Message[MQTTReceiptPayload]) -> None: link_stats.num_received_by_type[Message.type_name()] += 1 link_stats.num_received_by_type[message.Header.MessageType] += 1 link_stats.num_received_by_topic[message.Payload.message.topic] += 1 + if "gridworks-event" in message.Payload.message.topic: + self.num_events_received += 1 def total_received(self, message_type: str) -> int: return self.num_received_by_type.get(message_type, 0) @@ -93,6 +117,7 @@ def __str__(self) -> str: s += "\nGlobal received by message_type:" for message_type in sorted(self.num_received_by_type): s += f"\n {self.num_received_by_type[message_type]:3d}: [{message_type}]" + s += f"\n {self.num_events_received:3d}: [gridworks.event*]" for link_name in sorted(self.links): s += "\n" s += str(self.links[link_name]) diff --git a/src/gwproactor_test/comm_test_helper.py b/src/gwproactor_test/comm_test_helper.py index 062e176..14d173b 100644 --- a/src/gwproactor_test/comm_test_helper.py +++ b/src/gwproactor_test/comm_test_helper.py @@ -12,6 +12,8 @@ from gwproactor import Proactor from gwproactor import ProactorSettings from gwproactor import setup_logging +from gwproactor.config import DEFAULT_BASE_NAME +from gwproactor.config import LoggingSettings from gwproactor.config import MQTTClient from gwproactor.config import Paths from gwproactor_test import copy_keys @@ -49,6 +51,8 @@ class CommTestHelper: parent_helper: ProactorTestHelper child_helper: ProactorTestHelper verbose: bool + child_verbose: bool + parent_verbose: bool parent_on_screen: bool lifecycle_logging: bool logger_guards: LoggerGuards @@ -67,6 +71,8 @@ def __init__( child_settings: Optional[ChildSettingsT] = None, parent_settings: Optional[ParentSettingsT] = None, verbose: bool = False, + child_verbose: bool = False, + parent_verbose: bool = False, lifecycle_logging: bool = False, add_child: bool = False, add_parent: bool = False, @@ -95,13 +101,20 @@ def __init__( parent_name, parent_path_name, ( - self.parent_settings_t(paths=Paths(name=Path(parent_path_name))) + self.parent_settings_t( + logging=LoggingSettings( + base_log_name=f"parent_{DEFAULT_BASE_NAME}" + ), + paths=Paths(name=Path(parent_path_name)), + ) if parent_settings is None else parent_settings ), dict() if parent_kwargs is None else parent_kwargs, ) self.verbose = verbose + self.child_verbose = child_verbose + self.parent_verbose = parent_verbose self.parent_on_screen = parent_on_screen self.lifecycle_logging = lifecycle_logging self.setup_logging() @@ -207,7 +220,6 @@ def setup_logging(self): if not self.lifecycle_logging: self.child_helper.settings.logging.levels.lifecycle = logging.WARNING self.parent_helper.settings.logging.levels.lifecycle = logging.WARNING - args = argparse.Namespace(verbose=self.verbose) self.logger_guards = LoggerGuards( 
list(self.child_helper.settings.logging.qualified_logger_names().values()) + list( @@ -215,7 +227,7 @@ def setup_logging(self): ) ) setup_logging( - args, + argparse.Namespace(verbose=self.verbose or self.child_verbose), self.child_helper.settings, errors, add_screen_handler=True, @@ -223,7 +235,7 @@ def setup_logging(self): ) assert not errors setup_logging( - args, + argparse.Namespace(verbose=self.verbose or self.parent_verbose), self.parent_helper.settings, errors, add_screen_handler=self.parent_on_screen, diff --git a/src/gwproactor_test/proactor_recorder.py b/src/gwproactor_test/proactor_recorder.py index ada9c82..8579c8b 100644 --- a/src/gwproactor_test/proactor_recorder.py +++ b/src/gwproactor_test/proactor_recorder.py @@ -248,11 +248,7 @@ def summary_str(self: ProactorT) -> str: s += "Pending acks:\n" for link_name in self.stats.links: s += f" {link_name:10s} {self._links.num_acks(link_name):3d}\n" - s += ( - f"pending events: {self._links.num_pending} " - f"pending upload events: {self._links.num_reupload_pending} " - f"reuploading: {self._links.reuploading()}\n" - ) + s += self._links.get_reuploads_str() + "\n" s += f"subacks_paused: {self.subacks_paused} pending_subacks: {len(self.pending_subacks)}\n" return s diff --git a/src/gwproactor_test/proactor_test_collections.py b/src/gwproactor_test/proactor_test_collections.py index 9502892..f7ebb2f 100644 --- a/src/gwproactor_test/proactor_test_collections.py +++ b/src/gwproactor_test/proactor_test_collections.py @@ -2,20 +2,102 @@ import logging import time import warnings +from dataclasses import dataclass +from pathlib import Path from typing import Type import pytest from gwproto import MQTTTopic from paho.mqtt.client import MQTT_ERR_CONN_LOST +from gwproactor import ServicesInterface from gwproactor.links import StateName from gwproactor.message import DBGEvent from gwproactor.message import DBGPayload +from gwproactor.persister import TimedRollingFilePersister from gwproactor_test.certs import uses_tls from gwproactor_test.comm_test_helper import CommTestHelper from gwproactor_test.wait import await_for +@dataclass +class _EventEntry: + uid: str + path: Path + + +class _EventGen: + ok: list[_EventEntry] + corrupt: list[_EventEntry] + empty: list[_EventEntry] + missing: list[_EventEntry] + + persister: TimedRollingFilePersister + + def __len__(self) -> int: + return len(self.ok) + len(self.corrupt) + len(self.empty) + + def __init__(self, proactor: ServicesInterface): + self.ok = [] + self.corrupt = [] + self.empty = [] + self.missing = [] + persister = proactor._event_persister # noqa + assert isinstance(persister, TimedRollingFilePersister) + self.persister = persister + + def _generate_event(self, member_name: str) -> _EventEntry: + event = DBGEvent(Command=DBGPayload(), Msg=f"event {len(self)} {member_name}") + ret = self.persister.persist( + event.MessageId, event.json(sort_keys=True, indent=2).encode() + ) + if ret.is_err(): + raise ret.err() + entry = _EventEntry( + event.MessageId, self.persister.get_path(event.MessageId) # noqa + ) + getattr(self, member_name).append(entry) + return entry + + def _generate_ok(self) -> _EventEntry: + return self._generate_event("ok") + + def _generate_corrupt(self) -> _EventEntry: + entry = self._generate_event("corrupt") + with entry.path.open() as f: + contents = f.read() + with entry.path.open("w") as f: + f.write(contents[:-6]) + return entry + + def _generate_empty(self) -> _EventEntry: + entry = self._generate_event("empty") + with entry.path.open("w") as f: + f.write("") + return 
entry + + def _generate_missing(self) -> _EventEntry: + entry = self._generate_event("missing") + entry.path.unlink() + return entry + + def generate( + self, + num_ok: int = 0, + num_corrupt: int = 0, + num_empty: int = 0, + num_missing: int = 0, + ): + for _ in range(num_ok): + self._generate_ok() + for _ in range(num_corrupt): + self._generate_corrupt() + for _ in range(num_empty): + self._generate_empty() + for _ in range(num_missing): + self._generate_missing() + + @pytest.mark.asyncio class ProactorCommTests: CTH: Type[CommTestHelper] @@ -1142,6 +1224,7 @@ async def test_reupload_basic(self): child = h.child child.disable_derived_events() upstream_link = h.child._links.link(child.upstream_client) + reupload_counts = h.child.stats.link(child.upstream_client).reupload_counts await await_for( lambda: child.mqtt_quiescent(), 1, @@ -1153,6 +1236,8 @@ async def test_reupload_basic(self): assert child._links.num_reupload_pending == 0 assert child._links.num_reuploaded_unacked == 0 assert not child._links.reuploading() + assert reupload_counts.started == 0 + assert reupload_counts.completed == 0 # Start parent, wait for reconnect. h.start_parent() @@ -1165,7 +1250,7 @@ async def test_reupload_basic(self): # Wait for reuploading to complete await await_for( - lambda: not child._links.reuploading(), + lambda: reupload_counts.completed > 0, 1, "ERROR waiting for re-upload to complete", err_str_f=h.summary_str, @@ -1191,6 +1276,7 @@ async def test_reupload_flow_control_simple(self): child = h.child child.disable_derived_events() upstream_link = h.child._links.link(child.upstream_client) + reupload_counts = h.child.stats.link(child.upstream_client).reupload_counts await await_for( lambda: child.mqtt_quiescent(), 1, @@ -1227,9 +1313,9 @@ async def test_reupload_flow_control_simple(self): # Wait for reupload to complete await await_for( - lambda: not child._links.reuploading(), + lambda: reupload_counts.completed > 0, 1, - "ERROR waiting for reupload", + "ERROR waiting for reupload to complete", err_str_f=h.summary_str, ) @@ -1404,3 +1490,61 @@ async def test_reupload_flow_control_detail(self): last_num_repuload_pending = curr_num_repuload_pending assert not child_links.reuploading() + + @pytest.mark.asyncio + async def test_reupload_errors(self): + async with self.CTH( + start_child=True, + add_parent=True, + child_verbose=False, + ) as h: + child = h.child + child.disable_derived_events() + reupload_counts = h.child.stats.link(child.upstream_client).reupload_counts + child_links = h.child._links + upstream_link = child_links.link(child.upstream_client) + parent = h.parent + + def _err_str() -> str: + return ( + f"\nCHILD\n{child.summary_str()}\n" + f"\nPARENT\n{parent.summary_str()}\n" + ) + + await await_for( + lambda: child.mqtt_quiescent(), + 1, + "ERROR waiting for child to connect to mqtt", + err_str_f=_err_str, + ) + base_num_pending = child_links.num_pending + assert base_num_pending > 0 + assert child_links.num_reupload_pending == 0 + assert child_links.num_reuploaded_unacked == 0 + assert not child_links.reuploading() + + generator = _EventGen(child) + generator.generate(num_corrupt=10) + generator.generate(num_ok=10) + generator.generate(num_empty=10) + generator.generate(num_ok=10) + generator.generate(num_missing=10) + generator.generate(num_ok=10) + + h.start_parent() + await await_for( + lambda: upstream_link.active(), + 1, + "ERROR waiting for active", + err_str_f=_err_str, + ) + + # Wait for reupload to complete + await await_for( + lambda: reupload_counts.completed > 0, + 3, + 
"ERROR waiting for reupload to complete", + err_str_f=_err_str, + ) + assert reupload_counts.started == reupload_counts.completed + assert parent.stats.num_events_received >= base_num_pending + 60 diff --git a/tests/test_proactor/test_persister.py b/tests/test_proactor/test_persister.py index 77e0040..48b7cd7 100644 --- a/tests/test_proactor/test_persister.py +++ b/tests/test_proactor/test_persister.py @@ -202,6 +202,7 @@ def assert_contents( base_dir=p.base_dir, max_bytes=p.max_bytes, ) + assert p2.reindex().is_ok() if p.num_pending == 0: assert p.curr_bytes == 0 assert p.pending() == p2.pending() @@ -343,6 +344,7 @@ def inc_event(): # empty persister max_bytes = (num_events_supported + 1) * 1000 p = TimedRollingFilePersister(settings.paths.event_dir, max_bytes=max_bytes) + assert p.reindex().is_ok() assert_contents(p, max_bytes=max_bytes, num_pending=0) # a few @@ -415,6 +417,7 @@ def inc_event(): pendulum_travel_to(d1) exact_days = [d1] p = TimedRollingFilePersister(settings.paths.event_dir) + assert p.reindex().is_ok() assert_contents(p, num_pending=0, curr_dir=d1.isoformat()) result = p.persist(event.MessageId, event.json().encode()) assert result.is_ok() @@ -536,6 +539,7 @@ def inc_uid() -> str: exact_days = [] pendulum_travel_to(d1) p = TimedRollingFilePersister(settings.paths.event_dir, max_bytes=max_size) + assert p.reindex().is_ok() assert_contents(p, num_pending=0, curr_dir=d1.isoformat(), max_bytes=max_size) for i in range(1, 3): uids.append(inc_uid()) @@ -768,6 +772,7 @@ def inc_uid() -> str: try: pendulum_travel_to(d1) p = TimedRollingFilePersister(settings.paths.event_dir) + assert p.reindex().is_ok() p.persist(inc_uid(), buf) p.persist(inc_uid(), buf) @@ -785,6 +790,7 @@ def inc_uid() -> str: index = dict(p._pending) p = TimedRollingFilePersister(settings.paths.event_dir) + assert p.reindex().is_ok() assert p._pending == index # removed dir @@ -806,6 +812,7 @@ def inc_uid() -> str: # invalid file - invalid date shutil.copy(p6, p6_dir / ("x" + p6.name)) p = TimedRollingFilePersister(settings.paths.event_dir) + assert p.reindex().is_ok() assert p._pending == index finally: @@ -831,6 +838,7 @@ def inc_uid() -> str: try: pendulum_travel_to(d1) p = TimedRollingFilePersister(settings.paths.event_dir) + assert p.reindex().is_ok() uids.append(inc_uid()) exact_days.append(d1) @@ -862,6 +870,7 @@ def _roll_curr_dir(self): raise ValueError("whoops") broken = BrokenRoller(settings.paths.event_dir) + assert broken.reindex().is_ok() problems = broken.persist("bla", buf).unwrap_err() assert len(problems.errors) == 2 assert len(problems.warnings) == 0 @@ -874,6 +883,7 @@ def clear(self, uid: str) -> Result[bool, Problems]: raise ValueError("arg") p = BrokenRoller2(settings.paths.event_dir, max_bytes=len(buf) + 50) + assert p.reindex().is_ok() problems = p.persist("xxxbla", buf).unwrap_err() assert len(problems.errors) == 3 assert len(problems.warnings) == 0 @@ -883,6 +893,7 @@ def clear(self, uid: str) -> Result[bool, Problems]: # _trim_old_storage, clear error, file missing p = TimedRollingFilePersister(settings.paths.event_dir, max_bytes=len(buf) + 50) + assert p.reindex().is_ok() p.get_path(uids[-1]).unlink() problems = p.persist("xxxbla", buf).unwrap_err() assert len(problems.errors) == 0 @@ -907,6 +918,7 @@ def clear(self, uid: str) -> Result[bool, Problems]: shutil.rmtree(p.base_dir) settings.paths.mkdirs() p = TimedRollingFilePersister(settings.paths.event_dir, max_bytes=len(buf) + 50) + assert p.reindex().is_ok() p.persist(uids[-1], buf).unwrap() class 
BrokenRoller3(TimedRollingFilePersister):
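
The reuploads.py changes above center on one small bookkeeping idea: two insertion-ordered dicts used as ordered sets, with the first few pending event ids marked in-flight and each ack releasing at most one more id to send. The following is a minimal standalone sketch of that pattern under assumed names (it is not the library's Reuploads class, which also handles stats and path logging):

class ReuploadWindow:
    """Illustrative sketch of ack-paced reupload bookkeeping (hypothetical names)."""

    def __init__(self, pending_ids: list[str], num_initial: int = 5) -> None:
        # dicts preserve insertion order and give O(1) membership tests,
        # so they serve as 'ordered sets' of event ids.
        self.unacked: dict[str, None] = dict.fromkeys(pending_ids[:num_initial])
        self.pending: dict[str, None] = dict.fromkeys(pending_ids[num_initial:])

    def reuploading(self) -> bool:
        return bool(self.unacked or self.pending)

    def on_ack(self, ack_id: str) -> list[str]:
        """Retire an acked id and promote at most one pending id to send next."""
        to_send: list[str] = []
        if ack_id in self.unacked:
            self.unacked.pop(ack_id)
            if self.pending:
                next_id = next(iter(self.pending))
                self.pending.pop(next_id)
                self.unacked[next_id] = None
                to_send = [next_id]
        elif ack_id in self.pending:
            # e.g. an event generated and sent just before the reupload started
            self.pending.pop(ack_id)
        return to_send

Because the in-flight set never grows beyond its initial size, the upstream peer's acks pace the upload, which is the flow control the module docstring describes.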
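
The new _reupload_event keeps every decoding failure instead of raising, so one corrupt, empty, or missing file produces a problem event rather than stalling the reupload. gwproactor expresses this with its Problems container and Result/Ok/Err types; the sketch below uses only the standard library to show the same decode-or-collect shape (the names and error types here are illustrative, not the project's API):

import json
from dataclasses import dataclass, field
from typing import Any, Optional


@dataclass
class DecodeOutcome:
    event: Optional[dict[str, Any]] = None                    # parsed event on success
    problems: list[Exception] = field(default_factory=list)   # accumulated decode errors


def decode_stored_event(uid: str, raw: Optional[bytes]) -> DecodeOutcome:
    """Decode persisted event bytes, collecting problems instead of raising."""
    out = DecodeOutcome()
    if raw is None:
        out.problems.append(KeyError(f"no stored bytes for uid {uid}"))
    elif len(raw) == 0:
        out.problems.append(ValueError(f"empty file for uid {uid}"))
    else:
        try:
            text = raw.decode("utf-8")
        except UnicodeDecodeError as e:
            out.problems.append(e)
        else:
            try:
                out.event = json.loads(text)
            except json.JSONDecodeError as e:
                out.problems.append(e)
    return out

A caller in the spirit of _continue_reupload would publish out.event when problems is empty, and otherwise report the problems and clear the stored record so the same broken file is not retried forever.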
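
The LinkManager and Reuploads hunks also lean on the path_dbg idiom: each branch ORs a distinct bit into a local integer, and a single log line at the end records exactly which paths executed. Shown with stock logging rather than ProactorLogger's path() helper, the idiom reduces to roughly this (function and logger names are illustrative):

import logging

logger = logging.getLogger("path-demo")


def handle(items: list[int]) -> None:
    # Each interesting branch sets its own bit; the final hex value is a
    # compact record of which branches ran on this call.
    path_dbg = 0
    if not items:
        path_dbg |= 0x00000001
    for item in items:
        if item % 2:
            path_dbg |= 0x00000002
        else:
            path_dbg |= 0x00000004
    logger.debug("--handle path:0x%08X", path_dbg)

Reading, say, path:0x00000006 in a log then means "non-empty input, both odd and even items seen" without any per-branch log lines.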