From 1f28952eccccc3e8992b0a969d7321c75d04efe2 Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Fri, 8 Mar 2024 17:56:28 +0200 Subject: [PATCH 001/127] Add clickhouse driver dependency #1055 Signed-off-by: Marcel Coetzee --- poetry.lock | 195 +++++++++++++++++++++++++++++++++++-------------- pyproject.toml | 1 + 2 files changed, 143 insertions(+), 53 deletions(-) diff --git a/poetry.lock b/poetry.lock index cad68180dc..5d49191996 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. [[package]] name = "about-time" @@ -1670,6 +1670,128 @@ files = [ click = ">=4.0" PyYAML = ">=3.11" +[[package]] +name = "clickhouse-driver" +version = "0.2.7" +description = "Python driver with native interface for ClickHouse" +optional = false +python-versions = ">=3.7, <4" +files = [ + {file = "clickhouse-driver-0.2.7.tar.gz", hash = "sha256:299cfbe6d561955d88eeab6e09f3de31e2f6daccc6fdd904a59e46357d2d28d9"}, + {file = "clickhouse_driver-0.2.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c44fefc2fd44f432d5b162bfe34ad76840137c34167d46a18c554a7c7c6e3566"}, + {file = "clickhouse_driver-0.2.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e018452a7bf8d8c0adf958afbc5b0d29e402fc09a1fb34e9186293eae57f3b4e"}, + {file = "clickhouse_driver-0.2.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ff8b09f8b13df28d2f91ee3d0d2edd9589cbda76b74acf60669112219cea8c9d"}, + {file = "clickhouse_driver-0.2.7-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:54aa91c9512fd5a73f038cae4f67ca2ff0b2f8a84de846179a31530936ef4e20"}, + {file = "clickhouse_driver-0.2.7-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8342a7ba31ccb393ee31dfd61173aa84c995b4ac0b44d404adc8463534233d5"}, + {file = "clickhouse_driver-0.2.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:199000f8adf38fade0b5a52c273a396168105539de741a18ba3e68d7fc06e0e6"}, + {file = "clickhouse_driver-0.2.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f60a2a40602b207506e505cfb184a81cd4b752bde17153bc0b32c3931ddb792f"}, + {file = "clickhouse_driver-0.2.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5db3a26b18146b2b0b06d3f32ce588af5afaa38c719daf6f9606981514228a8b"}, + {file = "clickhouse_driver-0.2.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5579a31da1f3cf49630e43fbbb11cab891b78161abdcb33908b79820b7cd3a23"}, + {file = "clickhouse_driver-0.2.7-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:cc39f0fb761aed96917b0f55679174a50f9591afc0e696e745cd698ef822661f"}, + {file = "clickhouse_driver-0.2.7-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:9aa0f7c740e4e61886c6d388792c5d1a2084d4b5462e6dcfc24e30ca7e7f8e68"}, + {file = "clickhouse_driver-0.2.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2caee88b6eec7b33ddbccd24501ad99ff8ff2b0a6a4471945cbfb28947a9a791"}, + {file = "clickhouse_driver-0.2.7-cp310-cp310-win32.whl", hash = "sha256:a4aef432cc7120a971eebb7ca2fddac4472e810b57e403d3a371b0c69cbb2bb0"}, + {file = "clickhouse_driver-0.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:f307de7df6bc23ad5ec8a1ba1db157f4d14de673ddd4798f37790f23255605b0"}, + {file = "clickhouse_driver-0.2.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:cbf3ca8919bf856ca6588669a863065fb732a32a6387095f64d19038fd99db9f"}, + {file = "clickhouse_driver-0.2.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ab68b3d9b9d1386adfd3a57edd47b62858a145bf7ccc7f11b31d308195d966e5"}, + {file = "clickhouse_driver-0.2.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:985a9d60044c5ad39c6e018b852c7105ec4ebfdf4c3abe23183b4867454e570a"}, + {file = "clickhouse_driver-0.2.7-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6c94330054c8d92d2286898906f843f26e2f96fc2aa11a9a96a7b5593d299bf0"}, + {file = "clickhouse_driver-0.2.7-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:92938f55c8f797e50e624a4b96e685178d043cdf0ede306a7fd4e7dda19b8dfd"}, + {file = "clickhouse_driver-0.2.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05bd53e9bf49c3013d06f9e6d2812872d44b150f7a2d1cf18e1498257d42330e"}, + {file = "clickhouse_driver-0.2.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f1f8ed5404e283a9ded499c33eade2423fdc15e31f8a711d75e91f890d0f70b"}, + {file = "clickhouse_driver-0.2.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a398085e4a1766d907ac32c282d4172db38a44243bde303372396208d1cbf4bb"}, + {file = "clickhouse_driver-0.2.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:fa1808593123b6056f93808f0afbc7938f06a8149cb4e381aa7b1a234c1d3c18"}, + {file = "clickhouse_driver-0.2.7-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:0512d54ae23bd4a69278e04f42b651d7c71b63ba6043e2c6bd97b11329692f99"}, + {file = "clickhouse_driver-0.2.7-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5bc2b67e7e68f74ccebf95a8b3a13f13a7c34b89b32c9813103221de14c06c8b"}, + {file = "clickhouse_driver-0.2.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:04a37cdafc671cb796af3e566cef0aeb39111d82aebeecd9106a049434953b26"}, + {file = "clickhouse_driver-0.2.7-cp311-cp311-win32.whl", hash = "sha256:019538c7c23e976538e5081dd2f77a8a40bf663c638a62d857ff05f42b0c9052"}, + {file = "clickhouse_driver-0.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:5166643683584bc53fcadda73c65f6a9077feb472f3d167ecef1a1a7024973aa"}, + {file = "clickhouse_driver-0.2.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:59affab7b5a3c4aab5b6a730f606575efdefea213458de2eb14927ee4e0640f4"}, + {file = "clickhouse_driver-0.2.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:dcb93dd07fe65ac4f1a2bc0b8967911d4ad2152dbee000f025ea5cb575da5ecb"}, + {file = "clickhouse_driver-0.2.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55a48019b79181ae1ca90e980e74c5d413c3f8829f6744e2b056646c2d435a1a"}, + {file = "clickhouse_driver-0.2.7-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:507463c9157240fd7c3246781e8c30df8db3c80bf68925b36ff3ad4a80c4b924"}, + {file = "clickhouse_driver-0.2.7-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1e2d8d2295ee9e0cfab8ad77cb635a05da2160334b4f16ed8c3d00fbf39a2343"}, + {file = "clickhouse_driver-0.2.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e38c44546dcdb956b5ab0944cb3d51e8c98f816e75bab1a2254c478865bc6e7b"}, + {file = "clickhouse_driver-0.2.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e6690a2bdd9e7531fe50b53193279f8b35cbcd5c5ee36c0fcc112518a7d24f16"}, + {file = "clickhouse_driver-0.2.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:bc6b4ba0a6467fd09021aa1d87a44fb4589600d61b010fca41e0dfffd0dee322"}, + {file = "clickhouse_driver-0.2.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:254bbd400eb87ff547a08755bc714f712e11f7a6d3ebbbb7aaa1dd454fb16d44"}, + {file = "clickhouse_driver-0.2.7-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7bbbe3f8b87fc1489bc15fa9c88cc9fac9d4d7d683d076f058c2c83e6ee422fd"}, + {file = "clickhouse_driver-0.2.7-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:745e5b18f0957d932151527f1523d0e516c199de8c589638e5f55ab2559886f3"}, + {file = "clickhouse_driver-0.2.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:0fa0357fb5f26149e3df86a117d3678329b85d8827b78a5a09bbf224d8dd4541"}, + {file = "clickhouse_driver-0.2.7-cp312-cp312-win32.whl", hash = "sha256:ace652af7ca94ba3cb3a04a5c363e135dc5009f31d8201903e21db9d5daf2358"}, + {file = "clickhouse_driver-0.2.7-cp312-cp312-win_amd64.whl", hash = "sha256:c0ba68489544df89e4138a14b0ec3e1e5eb102d5d3283a91d9b837c420c0ab97"}, + {file = "clickhouse_driver-0.2.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:66267e4ba21fa66c97ce784a5de2202d3b7d4db3e50bfcdde92830a68f6fae30"}, + {file = "clickhouse_driver-0.2.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6cf55c285b75c178487407721baef4980b3c6515c9c0c1a6c1ea8b001afe658e"}, + {file = "clickhouse_driver-0.2.7-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:deeb66bb56490db2157f199c6d9aa2c53f046677be430cc834fc1e74eec6e654"}, + {file = "clickhouse_driver-0.2.7-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dfe5b4020939abeeb407b4eead598c954b1573d2d2b4f174f793b196d378b9d9"}, + {file = "clickhouse_driver-0.2.7-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84d39506b5f8d86a1195ebde1c66aba168f34ebce6ebd828888f0625cac54774"}, + {file = "clickhouse_driver-0.2.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f93a27db2dcbbd3ecad36e8df4395d047cb7410e2dc69f6d037674e15442f4ee"}, + {file = "clickhouse_driver-0.2.7-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ebc29e501e47ecbfd44c89c0e5c87b2a722049d38b9e93fdd4bea510a82e16ac"}, + {file = "clickhouse_driver-0.2.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f9cc8c186fea09a94d89e5c9c4e8d05ec3a80e2f6d25673c48efec8117a13cfc"}, + {file = "clickhouse_driver-0.2.7-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:0757dfde5410c42230b24825ea3ab904a78160520e5ceb953482e133e368733b"}, + {file = "clickhouse_driver-0.2.7-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c9f88818cf411f928c29ba295c677cd95773bd256b8490f5655fb489e0c6658c"}, + {file = "clickhouse_driver-0.2.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e19952f158ebe274c65ffeb294ba378d75048a48f31b77573948d606bed019d5"}, + {file = "clickhouse_driver-0.2.7-cp37-cp37m-win32.whl", hash = "sha256:008b1f32c7c68564de8051482b72a5289b6933bca9d9b1ad1474dd448d6768ba"}, + {file = "clickhouse_driver-0.2.7-cp37-cp37m-win_amd64.whl", hash = "sha256:622933cc9834c39f03de5d43a12f13fc7133d31d6d2597e67866d4a549ca9e60"}, + {file = "clickhouse_driver-0.2.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:92540581e5b5f36d915f14d05c30244870fb123c74b38c645fa47663053c5471"}, + {file = "clickhouse_driver-0.2.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:02dfadc6111b64e01c20b8c11266cab97d4f06685a392a183af437f2f1afb990"}, + {file = "clickhouse_driver-0.2.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:e3ca17fece86fe85d97705024bec881978271931b3d00db273c9d63244f7d606"}, + {file = "clickhouse_driver-0.2.7-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:76474f1315ca3ab484ae28ad085b8f756c8b9a755882f93912b2149290482033"}, + {file = "clickhouse_driver-0.2.7-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f5c0ff12368b34aaf58dd948b0819e5b54d261911de334d3f048328dc9354013"}, + {file = "clickhouse_driver-0.2.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cd441b17294e90e313b08fabf84fcc782c191d2b9b2a924f163928202db6fcc"}, + {file = "clickhouse_driver-0.2.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62aa158f61d7d84c58e8cd75b3b8340b28607e5a70132395078f578d518aaae3"}, + {file = "clickhouse_driver-0.2.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:bcb2a39a1fef8bf1b581f06125c2a84a5b92c939b079d1a95126e3207b05dc77"}, + {file = "clickhouse_driver-0.2.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1f29cc641a65e89a51a15f6d195f565ad2761d1bd653408c6b4046c987c5fb99"}, + {file = "clickhouse_driver-0.2.7-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ac1a43690696bda46c9a23fc6fd79b6fe22d428a18e880bdbdf5e6aeb31008c5"}, + {file = "clickhouse_driver-0.2.7-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:1dd5ea4584c42f85d96ddfa7d07da2abb35a797c45e4d3a66ace149ee4977cad"}, + {file = "clickhouse_driver-0.2.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a736c0af858a3c83af03848b18754ab18dc594cc7f3bf6be0b1fac682def182c"}, + {file = "clickhouse_driver-0.2.7-cp38-cp38-win32.whl", hash = "sha256:6cb8ca47f5818c1bc5814b9ff775e383f3c50059b1fd28a02cb9be1b666929f8"}, + {file = "clickhouse_driver-0.2.7-cp38-cp38-win_amd64.whl", hash = "sha256:a90e7dc92985669a5e6569356bb3028d9d475f95006d4487cb0789aa53f9489c"}, + {file = "clickhouse_driver-0.2.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:04b77cd6c583da9135db4a62c5a7999ae248c2dbfc0cb8e8a3d8a853b1fbfa11"}, + {file = "clickhouse_driver-0.2.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c7671f8c0e8960d766b2e0eaefcae3088fccdd3920e9cd3dee8e344cfd0a6929"}, + {file = "clickhouse_driver-0.2.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:502d7cd28522b95a399e993ffd48487e8c12c50ce2d4e89b77b938f945304405"}, + {file = "clickhouse_driver-0.2.7-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:969739279f4010e7b5b6b2c9d2ab56a463aed11fdaed5e02424c1b3915f144f8"}, + {file = "clickhouse_driver-0.2.7-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ed34b60f741eeb02407ea72180d77cbfc368c1be6fc2f2ff8319d1856ce67e10"}, + {file = "clickhouse_driver-0.2.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a667b48927f4420eb8c03fa33369edfbdf359a788897a01ac945263a2a611461"}, + {file = "clickhouse_driver-0.2.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f93aa3a90f3847872d7464ec9076482b2e812c4e7d61682daedffdf3471be00"}, + {file = "clickhouse_driver-0.2.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:190890667215691fdf2155c3b233b39146054ab1cd854c7d91221e6ed633d71e"}, + {file = "clickhouse_driver-0.2.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ff280aeac5e96c764cd31ba1077c95601337b9a97fb0b9ed4d24c64431f2c322"}, + {file = "clickhouse_driver-0.2.7-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:01e63e35d2ab55b8eb48facf6e951968c80d27ee6703aa6c91c73d9d0a4d0efe"}, + {file = "clickhouse_driver-0.2.7-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:a29fb24b910dafc8c11ba882797d13ec0323a97dce80a57673116fa893d1b669"}, + {file = "clickhouse_driver-0.2.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5f229a7853fc767e63143ea69889d49f6fd5623adc2f7b0f7eb360117d7e91a5"}, + {file = "clickhouse_driver-0.2.7-cp39-cp39-win32.whl", hash = "sha256:b7f34ad2ed509f48f8ed1f9b96e89765173a7b35d286c7350aa85934a11c0f49"}, + {file = "clickhouse_driver-0.2.7-cp39-cp39-win_amd64.whl", hash = "sha256:78b166597afbe490cc0cdac44fed8c8b81668f87125601dda17b154f237eef5d"}, + {file = "clickhouse_driver-0.2.7-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:16ab64beb8d079cb9b3200539539a35168f524eedf890c9acefb719e25bdc96e"}, + {file = "clickhouse_driver-0.2.7-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03e28fd50fc7c54874bf8e638a2ea87f73ae35bfbbf90123fdb395f38d62f159"}, + {file = "clickhouse_driver-0.2.7-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0677b8350acd8d186b6acd0026b62dd262d6fee428a5fa3ad9561908d4b02c39"}, + {file = "clickhouse_driver-0.2.7-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a2f3c9e2182809131701bb28a606dec90525c7ab20490714714a4b3eb015454b"}, + {file = "clickhouse_driver-0.2.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:e03a1a1b30cc58c9bd2cbe25bf5e40b1f1d16d52d44ddefb3af50435d1ed613c"}, + {file = "clickhouse_driver-0.2.7-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a1be8081306a4beb12444ed8e3208e1eb6c01ed207c471b33009c13504c88139"}, + {file = "clickhouse_driver-0.2.7-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:933b40722cbca9b1123a5bb2fb4bafafd234deae0f3481125cb6b6fa1d39aa84"}, + {file = "clickhouse_driver-0.2.7-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3054b5022f9bf15a5f4663a7cd190f466e70a2d7b8d45429d8742c515b556c10"}, + {file = "clickhouse_driver-0.2.7-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:61744760ee046c9a268cb801ca21bfe44c4873db9901a7cd0f3ca8830205feff"}, + {file = "clickhouse_driver-0.2.7-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:5e28427e05a72e7a4c3672e36703a2d80107ee0b3ab537e3380d726c96b07821"}, + {file = "clickhouse_driver-0.2.7-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c483f5ec836ae87803478f2a7b9daf15343078edd6a8be7364dd9db64905bbd0"}, + {file = "clickhouse_driver-0.2.7-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28220b794874e68bc2f06dbfff5748f1c5a3236922f59e127abd58d44ae20a3f"}, + {file = "clickhouse_driver-0.2.7-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c09877b59b34d5b3043ad70ec31543173cac8b64b4a8afaa89416b22fb28da5"}, + {file = "clickhouse_driver-0.2.7-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3580f78db27119f7380627873214ae1342066f1ecb35700c1d7bf418dd70ae73"}, + {file = "clickhouse_driver-0.2.7-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:0842ac1b2f7a9ca46dac2027849b241bccd8eb8ff1c59cb0a5874042b267b733"}, + {file = "clickhouse_driver-0.2.7-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7a3fb585e2d3514196258a4a3b0267510c03477f3c2380239ade4c056ba689a7"}, + {file = 
"clickhouse_driver-0.2.7-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48ea25287566d45efbaee0857ad25e8b33ffd7fd73e89424d79fe7f532962915"}, + {file = "clickhouse_driver-0.2.7-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee4a4935667b59b4816a5ca77300f5dbe5a7416860551d17376426b8fefc1175"}, + {file = "clickhouse_driver-0.2.7-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:358058cfceea9b43c4af9de81842563746f16984b34525a15b41eacf8fc2bed2"}, + {file = "clickhouse_driver-0.2.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:ae760fb843dec0b5c398536ca8dfaf243f494ba8fc68132ae1bd62004b0c396a"}, +] + +[package.dependencies] +pytz = "*" +tzlocal = "*" + +[package.extras] +lz4 = ["clickhouse-cityhash (>=1.0.2.1)", "lz4", "lz4 (<=3.0.1)"] +numpy = ["numpy (>=1.12.0)", "pandas (>=0.24.0)"] +zstd = ["clickhouse-cityhash (>=1.0.2.1)", "zstd"] + [[package]] name = "colorama" version = "0.4.6" @@ -3465,56 +3587,6 @@ files = [ {file = "google_re2-1.1-3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d140c7b9395b4d1e654127aa1c99bcc603ed01000b7bc7e28c52562f1894ec12"}, {file = "google_re2-1.1-3-cp39-cp39-win32.whl", hash = "sha256:80c5fc200f64b2d903eeb07b8d6cefc620a872a0240c7caaa9aca05b20f5568f"}, {file = "google_re2-1.1-3-cp39-cp39-win_amd64.whl", hash = "sha256:9eb6dbcee9b5dc4069bbc0634f2eb039ca524a14bed5868fdf6560aaafcbca06"}, - {file = "google_re2-1.1-4-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:0db114d7e1aa96dbcea452a40136d7d747d60cbb61394965774688ef59cccd4e"}, - {file = "google_re2-1.1-4-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:82133958e003a1344e5b7a791b9a9dd7560b5c8f96936dbe16f294604524a633"}, - {file = "google_re2-1.1-4-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:9e74fd441d1f3d917d3303e319f61b82cdbd96b9a5ba919377a6eef1504a1e2b"}, - {file = "google_re2-1.1-4-cp310-cp310-macosx_13_0_x86_64.whl", hash = "sha256:734a2e7a4541c57253b5ebee24f3f3366ba3658bcad01da25fb623c78723471a"}, - {file = "google_re2-1.1-4-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:d88d5eecbc908abe16132456fae13690d0508f3ac5777f320ef95cb6cab9a961"}, - {file = "google_re2-1.1-4-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:b91db80b171ecec435a07977a227757dd487356701a32f556fa6fca5d0a40522"}, - {file = "google_re2-1.1-4-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b23129887a64bb9948af14c84705273ed1a40054e99433b4acccab4dcf6a226"}, - {file = "google_re2-1.1-4-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5dc1a0cc7cd19261dcaf76763e2499305dbb7e51dc69555167cdb8af98782698"}, - {file = "google_re2-1.1-4-cp310-cp310-win32.whl", hash = "sha256:3b2ab1e2420b5dd9743a2d6bc61b64e5f708563702a75b6db86637837eaeaf2f"}, - {file = "google_re2-1.1-4-cp310-cp310-win_amd64.whl", hash = "sha256:92efca1a7ef83b6df012d432a1cbc71d10ff42200640c0f9a5ff5b343a48e633"}, - {file = "google_re2-1.1-4-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:854818fd4ce79787aca5ba459d6e5abe4ca9be2c684a5b06a7f1757452ca3708"}, - {file = "google_re2-1.1-4-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:4ceef51174b6f653b6659a8fdaa9c38960c5228b44b25be2a3bcd8566827554f"}, - {file = "google_re2-1.1-4-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:ee49087c3db7e6f5238105ab5299c09e9b77516fe8cfb0a37e5f1e813d76ecb8"}, - {file = "google_re2-1.1-4-cp311-cp311-macosx_13_0_x86_64.whl", hash = 
"sha256:dc2312854bdc01410acc5d935f1906a49cb1f28980341c20a68797ad89d8e178"}, - {file = "google_re2-1.1-4-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:0dc0d2e42296fa84a3cb3e1bd667c6969389cd5cdf0786e6b1f911ae2d75375b"}, - {file = "google_re2-1.1-4-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:6bf04ced98453b035f84320f348f67578024f44d2997498def149054eb860ae8"}, - {file = "google_re2-1.1-4-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1d6b6ef11dc4ab322fa66c2f3561925f2b5372a879c3ed764d20e939e2fd3e5f"}, - {file = "google_re2-1.1-4-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0dcde6646fa9a97fd3692b3f6ae7daf7f3277d7500b6c253badeefa11db8956a"}, - {file = "google_re2-1.1-4-cp311-cp311-win32.whl", hash = "sha256:5f4f0229deb057348893574d5b0a96d055abebac6debf29d95b0c0e26524c9f6"}, - {file = "google_re2-1.1-4-cp311-cp311-win_amd64.whl", hash = "sha256:4713ddbe48a18875270b36a462b0eada5e84d6826f8df7edd328d8706b6f9d07"}, - {file = "google_re2-1.1-4-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:40a698300b8faddbb325662973f839489c89b960087060bd389c376828978a04"}, - {file = "google_re2-1.1-4-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:103d2d7ac92ba23911a151fd1fc7035cbf6dc92a7f6aea92270ebceb5cd5acd3"}, - {file = "google_re2-1.1-4-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:51fb7182bccab05e8258a2b6a63dda1a6b4a9e8dfb9b03ec50e50c49c2827dd4"}, - {file = "google_re2-1.1-4-cp312-cp312-macosx_13_0_x86_64.whl", hash = "sha256:65383022abd63d7b620221eba7935132b53244b8b463d8fdce498c93cf58b7b7"}, - {file = "google_re2-1.1-4-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:396281fc68a9337157b3ffcd9392c6b7fcb8aab43e5bdab496262a81d56a4ecc"}, - {file = "google_re2-1.1-4-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:8198adcfcff1c680e052044124621730fc48d08005f90a75487f5651f1ebfce2"}, - {file = "google_re2-1.1-4-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:81f7bff07c448aec4db9ca453d2126ece8710dbd9278b8bb09642045d3402a96"}, - {file = "google_re2-1.1-4-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7dacf730fd7d6ec71b11d6404b0b26e230814bfc8e9bb0d3f13bec9b5531f8d"}, - {file = "google_re2-1.1-4-cp312-cp312-win32.whl", hash = "sha256:8c764f62f4b1d89d1ef264853b6dd9fee14a89e9b86a81bc2157fe3531425eb4"}, - {file = "google_re2-1.1-4-cp312-cp312-win_amd64.whl", hash = "sha256:0be2666df4bc5381a5d693585f9bbfefb0bfd3c07530d7e403f181f5de47254a"}, - {file = "google_re2-1.1-4-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:5cb1b63a0bfd8dd65d39d2f3b2e5ae0a06ce4b2ce5818a1d1fc78a786a252673"}, - {file = "google_re2-1.1-4-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:e41751ce6b67a95230edd0772226dc94c2952a2909674cd69df9804ed0125307"}, - {file = "google_re2-1.1-4-cp38-cp38-macosx_13_0_arm64.whl", hash = "sha256:b998cfa2d50bf4c063e777c999a7e8645ec7e5d7baf43ad71b1e2e10bb0300c3"}, - {file = "google_re2-1.1-4-cp38-cp38-macosx_13_0_x86_64.whl", hash = "sha256:226ca3b0c2e970f3fc82001ac89e845ecc7a4bb7c68583e7a76cda70b61251a7"}, - {file = "google_re2-1.1-4-cp38-cp38-macosx_14_0_arm64.whl", hash = "sha256:9adec1f734ebad7c72e56c85f205a281d8fe9bf6583bc21020157d3f2812ce89"}, - {file = "google_re2-1.1-4-cp38-cp38-macosx_14_0_x86_64.whl", hash = "sha256:9c34f3c64ba566af967d29e11299560e6fdfacd8ca695120a7062b6ed993b179"}, - {file = "google_re2-1.1-4-cp38-cp38-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e1b85385fe293838e0d0b6e19e6c48ba8c6f739ea92ce2e23b718afe7b343363"}, - {file 
= "google_re2-1.1-4-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4694daa8a8987cfb568847aa872f9990e930c91a68c892ead876411d4b9012c3"}, - {file = "google_re2-1.1-4-cp38-cp38-win32.whl", hash = "sha256:5e671e9be1668187e2995aac378de574fa40df70bb6f04657af4d30a79274ce0"}, - {file = "google_re2-1.1-4-cp38-cp38-win_amd64.whl", hash = "sha256:f66c164d6049a8299f6dfcfa52d1580576b4b9724d6fcdad2f36f8f5da9304b6"}, - {file = "google_re2-1.1-4-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:25cb17ae0993a48c70596f3a3ef5d659638106401cc8193f51c0d7961b3b3eb7"}, - {file = "google_re2-1.1-4-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:5f101f86d14ca94ca4dcf63cceaa73d351f2be2481fcaa29d9e68eeab0dc2a88"}, - {file = "google_re2-1.1-4-cp39-cp39-macosx_13_0_arm64.whl", hash = "sha256:4e82591e85bf262a6d74cff152867e05fc97867c68ba81d6836ff8b0e7e62365"}, - {file = "google_re2-1.1-4-cp39-cp39-macosx_13_0_x86_64.whl", hash = "sha256:1f61c09b93ffd34b1e2557e5a9565039f935407a5786dbad46f64f1a484166e6"}, - {file = "google_re2-1.1-4-cp39-cp39-macosx_14_0_arm64.whl", hash = "sha256:12b390ad8c7e74bab068732f774e75e0680dade6469b249a721f3432f90edfc3"}, - {file = "google_re2-1.1-4-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:1284343eb31c2e82ed2d8159f33ba6842238a56782c881b07845a6d85613b055"}, - {file = "google_re2-1.1-4-cp39-cp39-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6c7b38e0daf2c06e4d3163f4c732ab3ad2521aecfed6605b69e4482c612da303"}, - {file = "google_re2-1.1-4-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1f4d4f0823e8b2f6952a145295b1ff25245ce9bb136aff6fe86452e507d4c1dd"}, - {file = "google_re2-1.1-4-cp39-cp39-win32.whl", hash = "sha256:1afae56b2a07bb48cfcfefaa15ed85bae26a68f5dc7f9e128e6e6ea36914e847"}, - {file = "google_re2-1.1-4-cp39-cp39-win_amd64.whl", hash = "sha256:aa7d6d05911ab9c8adbf3c225a7a120ab50fd2784ac48f2f0d140c0b7afc2b55"}, ] [[package]] @@ -7040,7 +7112,6 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, @@ -8541,6 +8612,24 @@ files = [ {file = "tzdata-2023.3.tar.gz", hash = "sha256:11ef1e08e54acb0d4f95bdb1be05da659673de4acbd21bf9c69e94cc5e907a3a"}, ] +[[package]] +name = "tzlocal" +version = "5.2" +description = "tzinfo object for the local timezone" +optional = false +python-versions = ">=3.8" +files = [ + {file = "tzlocal-5.2-py3-none-any.whl", hash = "sha256:49816ef2fe65ea8ac19d19aa7a1ae0551c834303d5014c6d5a62e4cbda8047b8"}, + {file = "tzlocal-5.2.tar.gz", hash = "sha256:8d399205578f1a9342816409cc1e46a93ebd5755e39ea2d85334bea911bf0e6e"}, +] + 
+[package.dependencies] +"backports.zoneinfo" = {version = "*", markers = "python_version < \"3.9\""} +tzdata = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +devenv = ["check-manifest", "pytest (>=4.3)", "pytest-cov", "pytest-mock (>=3.3)", "zest.releaser"] + [[package]] name = "uc-micro-py" version = "1.0.2" @@ -8986,4 +9075,4 @@ weaviate = ["weaviate-client"] [metadata] lock-version = "2.0" python-versions = ">=3.8.1,<3.13" -content-hash = "a7aa3e523522ab3260a7a19f097a34349b66cf046289db1e17b48f88f7fd189f" +content-hash = "ca6ca3ce92e469087463d0253d9e517c8ec43085576da4ab6199a238ec650743" diff --git a/pyproject.toml b/pyproject.toml index 88e6bd9390..d8c7da5e45 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -144,6 +144,7 @@ cryptography = "^41.0.7" google-api-python-client = ">=1.7.11" pytest-asyncio = "^0.23.5" types-sqlalchemy = "^1.4.53.38" +clickhouse-driver = "^0.2.7" [tool.poetry.group.pipeline] optional=true From d736dee1cdcefbc2fa6c6fd9645f8817d3d6bd2a Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Sat, 9 Mar 2024 18:46:38 +0200 Subject: [PATCH 002/127] Preliminary wireframe #1055 Signed-off-by: Marcel Coetzee --- dlt/common/data_writers/escape.py | 85 ++++++++++++------- dlt/destinations/impl/clickhouse/__init__.py | 24 ++++++ .../impl/clickhouse/clickhouse.py | 0 .../impl/clickhouse/configuration.py | 39 +++++++++ dlt/destinations/impl/clickhouse/factory.py | 0 .../impl/clickhouse/sql_client.py | 0 dlt/destinations/job_client_impl.py | 21 ++--- 7 files changed, 125 insertions(+), 44 deletions(-) create mode 100644 dlt/destinations/impl/clickhouse/__init__.py create mode 100644 dlt/destinations/impl/clickhouse/clickhouse.py create mode 100644 dlt/destinations/impl/clickhouse/configuration.py create mode 100644 dlt/destinations/impl/clickhouse/factory.py create mode 100644 dlt/destinations/impl/clickhouse/sql_client.py diff --git a/dlt/common/data_writers/escape.py b/dlt/common/data_writers/escape.py index 5460657253..190ce4b2d7 100644 --- a/dlt/common/data_writers/escape.py +++ b/dlt/common/data_writers/escape.py @@ -1,10 +1,11 @@ -import re import base64 -from typing import Any, Dict +import re from datetime import date, datetime, time # noqa: I251 +from typing import Any, Dict from dlt.common.json import json + # use regex to escape characters in single pass SQL_ESCAPE_DICT = {"'": "''", "\\": "\\\\", "\n": "\\n", "\r": "\\r"} @@ -24,14 +25,14 @@ def _escape_extended( ) -> str: escape_dict = escape_dict or SQL_ESCAPE_DICT escape_re = escape_re or SQL_ESCAPE_RE - return "{}{}{}".format(prefix, escape_re.sub(lambda x: escape_dict[x.group(0)], v), "'") + return f"{prefix}{escape_re.sub(lambda x: escape_dict[x.group(0)], v)}'" def escape_redshift_literal(v: Any) -> Any: if isinstance(v, str): # https://www.postgresql.org/docs/9.3/sql-syntax-lexical.html # looks like this is the only thing we need to escape for Postgres > 9.1 - # redshift keeps \ as escape character which is pre 9 behavior + # redshift keeps \ as escape character which is pre-9 behavior. 
return _escape_extended(v, prefix="'") if isinstance(v, bytes): return f"from_hex('{v.hex()}')" @@ -39,15 +40,12 @@ def escape_redshift_literal(v: Any) -> Any: return f"'{v.isoformat()}'" if isinstance(v, (list, dict)): return "json_parse(%s)" % _escape_extended(json.dumps(v), prefix="'") - if v is None: - return "NULL" - - return str(v) + return "NULL" if v is None else str(v) def escape_postgres_literal(v: Any) -> Any: if isinstance(v, str): - # we escape extended string which behave like the redshift string + # we escape extended string which behaves like the redshift string. return _escape_extended(v) if isinstance(v, (datetime, date, time)): return f"'{v.isoformat()}'" @@ -55,15 +53,12 @@ def escape_postgres_literal(v: Any) -> Any: return _escape_extended(json.dumps(v)) if isinstance(v, bytes): return f"'\\x{v.hex()}'" - if v is None: - return "NULL" - - return str(v) + return "NULL" if v is None else str(v) def escape_duckdb_literal(v: Any) -> Any: if isinstance(v, str): - # we escape extended string which behave like the redshift string + # We escape extended string which behaves like the redshift string. return _escape_extended(v) if isinstance(v, (datetime, date, time)): return f"'{v.isoformat()}'" @@ -71,10 +66,7 @@ def escape_duckdb_literal(v: Any) -> Any: return _escape_extended(json.dumps(v)) if isinstance(v, bytes): return f"from_base64('{base64.b64encode(v).decode('ascii')}')" - if v is None: - return "NULL" - - return str(v) + return "NULL" if v is None else str(v) MS_SQL_ESCAPE_DICT = { @@ -100,17 +92,12 @@ def escape_mssql_literal(v: Any) -> Any: if isinstance(v, bytes): from dlt.destinations.impl.mssql.mssql import VARBINARY_MAX_N - if len(v) <= VARBINARY_MAX_N: - n = str(len(v)) - else: - n = "MAX" + n = str(len(v)) if len(v) <= VARBINARY_MAX_N else "MAX" return f"CONVERT(VARBINARY({n}), '{v.hex()}', 2)" if isinstance(v, bool): return str(int(v)) - if v is None: - return "NULL" - return str(v) + return "NULL" if v is None else str(v) def escape_redshift_identifier(v: str) -> str: @@ -127,8 +114,8 @@ def escape_bigquery_identifier(v: str) -> str: def escape_snowflake_identifier(v: str) -> str: - # Snowcase uppercase all identifiers unless quoted. Match this here so queries on information schema work without issue - # See also https://docs.snowflake.com/en/sql-reference/identifiers-syntax#double-quoted-identifiers + # Snowflake uppercase all identifiers unless quoted. Match this here so queries on information schema work without issue. + # See https://docs.snowflake.com/en/sql-reference/identifiers-syntax#double-quoted-identifiers. 
return escape_postgres_identifier(v.upper()) @@ -147,7 +134,45 @@ def escape_databricks_literal(v: Any) -> Any: return _escape_extended(json.dumps(v), prefix="'", escape_dict=DATABRICKS_ESCAPE_DICT) if isinstance(v, bytes): return f"X'{v.hex()}'" - if v is None: - return "NULL" + return "NULL" if v is None else str(v) + + +# https://github.com/ClickHouse/ClickHouse/blob/master/docs/en/sql-reference/syntax.md#string +CLICKHOUSE_ESCAPE_DICT = { + "'": "''", + "\\": "\\\\", + "\n": "\\n", + "\t": "\\t", + "\b": "\\b", + "\f": "\\f", + "\r": "\\r", + "\0": "\\0", + "\a": "\\a", + "\v": "\\v", +} + +CLICKHOUSE_ESCAPE_RE = _make_sql_escape_re(CLICKHOUSE_ESCAPE_DICT) + + +def escape_clickhouse_literal(v: Any) -> Any: + if isinstance(v, str): + return _escape_extended( + v, prefix="'", escape_dict=CLICKHOUSE_ESCAPE_DICT, escape_re=CLICKHOUSE_ESCAPE_RE + ) + if isinstance(v, (datetime, date, time)): + return f"'{v.isoformat()}'" + if isinstance(v, (list, dict)): + return _escape_extended( + json.dumps(v), + prefix="'", + escape_dict=CLICKHOUSE_ESCAPE_DICT, + escape_re=CLICKHOUSE_ESCAPE_RE, + ) + if isinstance(v, bytes): + return f"'{v.hex()}'" + return "NULL" if v is None else str(v) + - return str(v) +def escape_clickhouse_identifier(v: str, quote_char: str = "`") -> str: + quote_char = quote_char if quote_char in {'"', "`"} else "`" + return quote_char + v.replace(quote_char, quote_char * 2).replace("\\", "\\\\") + quote_char diff --git a/dlt/destinations/impl/clickhouse/__init__.py b/dlt/destinations/impl/clickhouse/__init__.py new file mode 100644 index 0000000000..8d00ea79c6 --- /dev/null +++ b/dlt/destinations/impl/clickhouse/__init__.py @@ -0,0 +1,24 @@ +from dlt.common.arithmetics import DEFAULT_NUMERIC_PRECISION, DEFAULT_NUMERIC_SCALE +from dlt.common.data_writers.escape import escape_clickhouse_identifier, escape_clickhouse_literal +from dlt.common.destination import DestinationCapabilitiesContext + + +def capabilities() -> DestinationCapabilitiesContext: + caps = DestinationCapabilitiesContext() + caps.preferred_loader_file_format = "jsonl" + caps.supported_loader_file_formats = ["jsonl", "parquet"] + caps.preferred_staging_file_format = "parquet" + caps.supported_staging_file_formats = ["parquet", "jsonl"] + caps.escape_identifier = escape_clickhouse_identifier + caps.escape_literal = escape_clickhouse_literal + caps.decimal_precision = (DEFAULT_NUMERIC_PRECISION, DEFAULT_NUMERIC_SCALE) + caps.wei_precision = (76, 38) + caps.max_identifier_length = 1024 + caps.max_column_identifier_length = 300 + caps.max_query_length = 1024 * 1024 + caps.is_max_query_length_in_bytes = False + caps.max_text_data_type_length = 10 * 1024 * 1024 + caps.is_max_text_data_type_length_in_bytes = True + caps.supports_ddl_transactions = False + + return caps diff --git a/dlt/destinations/impl/clickhouse/clickhouse.py b/dlt/destinations/impl/clickhouse/clickhouse.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/dlt/destinations/impl/clickhouse/configuration.py b/dlt/destinations/impl/clickhouse/configuration.py new file mode 100644 index 0000000000..c29dbc0722 --- /dev/null +++ b/dlt/destinations/impl/clickhouse/configuration.py @@ -0,0 +1,39 @@ +from typing import TYPE_CHECKING, ClassVar, List, Optional, Final + +from dlt.common.configuration import configspec +from dlt.common.destination.reference import DestinationClientDwhWithStagingConfiguration + + +@configspec +class ClickhouseClientConfiguration(DestinationClientDwhWithStagingConfiguration): + destination_type: Final[str] = "clickhouse" # 
type: ignore + + http_timeout: float = 15.0 + file_upload_timeout: float = 30 * 60.0 + retry_deadline: float = 60.0 + + __config_gen_annotations__: ClassVar[List[str]] = [] + + if TYPE_CHECKING: + + def __init__( + self, + *, + dataset_name: str = None, + default_schema_name: Optional[str], + http_timeout: float = 15.0, + file_upload_timeout: float = 30 * 60.0, + retry_deadline: float = 60.0, + destination_name: str = None, + environment: str = None + ) -> None: + super().__init__( + dataset_name=dataset_name, + default_schema_name=default_schema_name, + destination_name=destination_name, + environment=environment, + ) + self.retry_deadline = retry_deadline + self.file_upload_timeout = file_upload_timeout + self.http_timeout = http_timeout + ... diff --git a/dlt/destinations/impl/clickhouse/factory.py b/dlt/destinations/impl/clickhouse/factory.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/dlt/destinations/impl/clickhouse/sql_client.py b/dlt/destinations/impl/clickhouse/sql_client.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/dlt/destinations/job_client_impl.py b/dlt/destinations/job_client_impl.py index 7896fa2cc4..a7de0eb38b 100644 --- a/dlt/destinations/job_client_impl.py +++ b/dlt/destinations/job_client_impl.py @@ -67,7 +67,7 @@ class SqlLoadJob(LoadJob): - """A job executing sql statement, without followup trait""" + """A job executing sql statement, without followup trait.""" def __init__(self, file_path: str, sql_client: SqlClientBase[Any]) -> None: super().__init__(FileStorage.get_file_name_from_file_path(file_path)) @@ -98,13 +98,10 @@ def exception(self) -> str: raise NotImplementedError() def _string_containts_ddl_queries(self, sql: str) -> bool: - for cmd in DDL_COMMANDS: - if re.search(cmd, sql, re.IGNORECASE): - return True - return False + return any(re.search(cmd, sql, re.IGNORECASE) for cmd in DDL_COMMANDS) def _split_fragments(self, sql: str) -> List[str]: - return [s + (";" if not s.endswith(";") else "") for s in sql.split(";") if s.strip()] + return [s + ("" if s.endswith(";") else ";") for s in sql.split(";") if s.strip()] @staticmethod def is_sql_job(file_path: str) -> bool: @@ -496,7 +493,7 @@ def _get_column_def_sql(self, c: TColumnSchema, table_format: TTableFormat = Non @staticmethod def _gen_not_null(v: bool) -> str: - return "NOT NULL" if not v else "" + return "" if v else "NOT NULL" def _create_table_update( self, table_name: str, storage_columns: TTableSchemaColumns @@ -518,13 +515,9 @@ def _row_to_schema_info(self, query: str, *args: Any) -> StorageSchemaInfo: # get schema as string # TODO: Re-use decompress/compress_state() implementation from dlt.pipeline.state_sync schema_str: str = row[5] - try: + with contextlib.suppress(ValueError): schema_bytes = base64.b64decode(schema_str, validate=True) schema_str = zlib.decompress(schema_bytes).decode("utf-8") - except ValueError: - # not a base64 string - pass - # make utc datetime inserted_at = pendulum.instance(row[4]) @@ -539,13 +532,13 @@ def _replace_schema_in_storage(self, schema: Schema) -> None: self._update_schema_in_storage(schema) def _update_schema_in_storage(self, schema: Schema) -> None: - # make sure that schema being saved was not modified from the moment it was loaded from storage + # Make sure the schema being saved wasn't modified from the moment it was loaded from storage. 
version_hash = schema.version_hash if version_hash != schema.stored_version_hash: raise DestinationSchemaTampered(schema.name, version_hash, schema.stored_version_hash) # get schema string or zip schema_str = json.dumps(schema.to_dict()) - # TODO: not all databases store data as utf-8 but this exception is mostly for redshift + # TODO: not all databases store data as utf-8 but this exception is mostly for redshift. schema_bytes = schema_str.encode("utf-8") if len(schema_bytes) > self.capabilities.max_text_data_type_length: # compress and to base64 From 0407ab8b251e1f82d526b96e36f94b7b9084c3b7 Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Mon, 11 Mar 2024 22:11:57 +0200 Subject: [PATCH 003/127] Update preliminary Clickhouse configurations #1055 Signed-off-by: Marcel Coetzee --- .../specs/connection_string_credentials.py | 10 ++- dlt/common/destination/reference.py | 4 +- dlt/destinations/impl/clickhouse/__init__.py | 18 +++-- .../impl/clickhouse/configuration.py | 77 +++++++++++++++---- 4 files changed, 81 insertions(+), 28 deletions(-) diff --git a/dlt/common/configuration/specs/connection_string_credentials.py b/dlt/common/configuration/specs/connection_string_credentials.py index 9dd6f00942..0574c7a0ac 100644 --- a/dlt/common/configuration/specs/connection_string_credentials.py +++ b/dlt/common/configuration/specs/connection_string_credentials.py @@ -23,13 +23,15 @@ def parse_native_representation(self, native_value: Any) -> None: raise InvalidConnectionString(self.__class__, native_value, self.drivername) try: url = make_url(native_value) - # update only values that are not None + # Update only values that are not None. self.update({k: v for k, v in url._asdict().items() if v is not None}) if self.query is not None: - # query may be immutable so make it mutable + # Query may be immutable so make it mutable. 
self.query = dict(self.query) - except Exception: - raise InvalidConnectionString(self.__class__, native_value, self.drivername) + except Exception as e: + raise InvalidConnectionString( + self.__class__, native_value, self.drivername + ) from e def on_resolved(self) -> None: if self.password: diff --git a/dlt/common/destination/reference.py b/dlt/common/destination/reference.py index 5e698347e5..6a945e5f1f 100644 --- a/dlt/common/destination/reference.py +++ b/dlt/common/destination/reference.py @@ -179,10 +179,10 @@ def __init__( @configspec class DestinationClientDwhWithStagingConfiguration(DestinationClientDwhConfiguration): - """Configuration of a destination that can take data from staging destination""" + """Configuration of a destination that can take data from a staging destination.""" staging_config: Optional[DestinationClientStagingConfiguration] = None - """configuration of the staging, if present, injected at runtime""" + """Configuration of the staging, if present, injected at runtime.""" if TYPE_CHECKING: def __init__( diff --git a/dlt/destinations/impl/clickhouse/__init__.py b/dlt/destinations/impl/clickhouse/__init__.py index 8d00ea79c6..7615669e61 100644 --- a/dlt/destinations/impl/clickhouse/__init__.py +++ b/dlt/destinations/impl/clickhouse/__init__.py @@ -5,20 +5,22 @@ def capabilities() -> DestinationCapabilitiesContext: caps = DestinationCapabilitiesContext() + caps.preferred_loader_file_format = "jsonl" - caps.supported_loader_file_formats = ["jsonl", "parquet"] + caps.supported_loader_file_formats = ["jsonl", "parquet", "insert_values"] caps.preferred_staging_file_format = "parquet" caps.supported_staging_file_formats = ["parquet", "jsonl"] + caps.escape_identifier = escape_clickhouse_identifier caps.escape_literal = escape_clickhouse_literal + caps.decimal_precision = (DEFAULT_NUMERIC_PRECISION, DEFAULT_NUMERIC_SCALE) - caps.wei_precision = (76, 38) - caps.max_identifier_length = 1024 - caps.max_column_identifier_length = 300 - caps.max_query_length = 1024 * 1024 - caps.is_max_query_length_in_bytes = False - caps.max_text_data_type_length = 10 * 1024 * 1024 - caps.is_max_text_data_type_length_in_bytes = True + + # Clickhouse has limited support for transactional semantics, especially for `ReplicatedMergeTree`, + # the default ClickHouse cloud engine. 
+ # https://clickhouse-driver.readthedocs.io/en/latest/dbapi.html#clickhouse_driver.dbapi.connection.Connection.commit + # https://clickhouse.com/docs/en/guides/developer/transactional#transactions-commit-and-rollback caps.supports_ddl_transactions = False + caps.supports_transactions = False return caps diff --git a/dlt/destinations/impl/clickhouse/configuration.py b/dlt/destinations/impl/clickhouse/configuration.py index c29dbc0722..1f25040f99 100644 --- a/dlt/destinations/impl/clickhouse/configuration.py +++ b/dlt/destinations/impl/clickhouse/configuration.py @@ -1,39 +1,88 @@ -from typing import TYPE_CHECKING, ClassVar, List, Optional, Final +from typing import ClassVar, List, Any, Final, TYPE_CHECKING from dlt.common.configuration import configspec -from dlt.common.destination.reference import DestinationClientDwhWithStagingConfiguration +from dlt.common.configuration.specs import ConnectionStringCredentials +from dlt.common.destination.reference import ( + DestinationClientDwhWithStagingConfiguration, +) +from dlt.common.libs.sql_alchemy import URL +from dlt.common.utils import digest128 + + +@configspec +class ClickhouseCredentials(ConnectionStringCredentials): + drivername: str = "clickhouse" + host: str + """Host with running ClickHouse server.""" + port: int = 9000 + """Port ClickHouse server is bound to. Defaults to 9000.""" + user: str = "default" + """Database user. Defaults to 'default'.""" + database: str = "default" + """database connect to. Defaults to 'default'.""" + connect_timeout: int = 10 + """Timeout for establishing connection. Defaults to 10 seconds.""" + send_receive_timeout: int = 300 + """Timeout for sending and receiving data. Defaults to 300 seconds.""" + + __config_gen_annotations__: ClassVar[List[str]] = [ + "host", + "port", + "user", + "database", + "connect_timeout", + "send_receive_timeout", + ] + + def parse_native_representation(self, native_value: Any) -> None: + super().parse_native_representation(native_value) + self.connect_timeout = int(self.query.get("connect_timeout", self.connect_timeout)) + self.send_receive_timeout = int( + self.query.get("send_receive_timeout", self.send_receive_timeout) + ) + if not self.is_partial(): + self.resolve() + + def to_url(self) -> URL: + url = super().to_url() + url.update_query_pairs( + [ + ("connect_timeout", str(self.connect_timeout)), + ("send_receive_timeout", str(self.send_receive_timeout)), + ] + ) + return url @configspec class ClickhouseClientConfiguration(DestinationClientDwhWithStagingConfiguration): - destination_type: Final[str] = "clickhouse" # type: ignore + destination_type: Final[str] = "clickhouse" # type: ignore[misc] + credentials: ClickhouseCredentials - http_timeout: float = 15.0 - file_upload_timeout: float = 30 * 60.0 - retry_deadline: float = 60.0 + create_indexes: bool = True - __config_gen_annotations__: ClassVar[List[str]] = [] + def fingerprint(self) -> str: + """Returns a fingerprint of host part of a connection string.""" + if self.credentials and self.credentials.host: + return digest128(self.credentials.host) + return "" if TYPE_CHECKING: def __init__( self, *, + credentials: ClickhouseCredentials = None, dataset_name: str = None, - default_schema_name: Optional[str], - http_timeout: float = 15.0, - file_upload_timeout: float = 30 * 60.0, - retry_deadline: float = 60.0, + default_schema_name: str = None, destination_name: str = None, environment: str = None ) -> None: super().__init__( + credentials=credentials, dataset_name=dataset_name, default_schema_name=default_schema_name, 
destination_name=destination_name, environment=environment, ) - self.retry_deadline = retry_deadline - self.file_upload_timeout = file_upload_timeout - self.http_timeout = http_timeout ... From d503c142b58979fd2508fdb94052e1bf32ea0046 Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Mon, 11 Mar 2024 22:12:57 +0200 Subject: [PATCH 004/127] Format #1055 Signed-off-by: Marcel Coetzee --- .../configuration/specs/connection_string_credentials.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/dlt/common/configuration/specs/connection_string_credentials.py b/dlt/common/configuration/specs/connection_string_credentials.py index 0574c7a0ac..54007bb127 100644 --- a/dlt/common/configuration/specs/connection_string_credentials.py +++ b/dlt/common/configuration/specs/connection_string_credentials.py @@ -29,9 +29,7 @@ def parse_native_representation(self, native_value: Any) -> None: # Query may be immutable so make it mutable. self.query = dict(self.query) except Exception as e: - raise InvalidConnectionString( - self.__class__, native_value, self.drivername - ) from e + raise InvalidConnectionString(self.__class__, native_value, self.drivername) from e def on_resolved(self) -> None: if self.password: From ed218e549734dfae0f9ce0f249b50069b9e3c763 Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Tue, 12 Mar 2024 15:10:55 +0200 Subject: [PATCH 005/127] Finalize wireframing #1055 Signed-off-by: Marcel Coetzee --- .../impl/clickhouse/clickhouse.py | 32 ++++++++++++++ dlt/destinations/impl/clickhouse/factory.py | 44 +++++++++++++++++++ 2 files changed, 76 insertions(+) diff --git a/dlt/destinations/impl/clickhouse/clickhouse.py b/dlt/destinations/impl/clickhouse/clickhouse.py index e69de29bb2..cb9a72493b 100644 --- a/dlt/destinations/impl/clickhouse/clickhouse.py +++ b/dlt/destinations/impl/clickhouse/clickhouse.py @@ -0,0 +1,32 @@ +from typing import ClassVar, Optional + +from dlt.common.destination import DestinationCapabilitiesContext +from dlt.common.destination.reference import SupportsStagingDestination +from dlt.common.schema import Schema, TColumnSchema +from dlt.common.schema.typing import TColumnType, TTableFormat +from dlt.destinations.impl.clickhouse import capabilities +from dlt.destinations.impl.clickhouse.configuration import ClickhouseClientConfiguration +from dlt.destinations.job_client_impl import SqlJobClientWithStaging +from dlt.destinations.sql_client import SqlClientBase +from dlt.destinations.typing import TNativeConn + + +class ClickhouseClient(SqlJobClientWithStaging, SupportsStagingDestination): + capabilities: ClassVar[DestinationCapabilitiesContext] = capabilities() + + def __init__( + self, + schema: Schema, + config: ClickhouseClientConfiguration, + sql_client: SqlClientBase[TNativeConn], + ) -> None: + super().__init__(schema, config, sql_client) + ... 
+ + def _get_column_def_sql(self, c: TColumnSchema, table_format: TTableFormat = None) -> str: + pass + + def _from_db_type( + self, db_type: str, precision: Optional[int], scale: Optional[int] + ) -> TColumnType: + pass diff --git a/dlt/destinations/impl/clickhouse/factory.py b/dlt/destinations/impl/clickhouse/factory.py index e69de29bb2..0734abdf56 100644 --- a/dlt/destinations/impl/clickhouse/factory.py +++ b/dlt/destinations/impl/clickhouse/factory.py @@ -0,0 +1,44 @@ +import typing as t + +from dlt.common.destination import Destination, DestinationCapabilitiesContext +from dlt.destinations.impl.clickhouse import capabilities +from dlt.destinations.impl.clickhouse.configuration import ( + ClickhouseClientConfiguration, + ClickhouseCredentials, +) + + +if t.TYPE_CHECKING: + from dlt.destinations.impl.clickhouse.clickhouse import ClickhouseClient + + +# noinspection PyPep8Naming +class clickhouse(Destination[ClickhouseClientConfiguration, ClickhouseClient]): + spec = ClickhouseClientConfiguration + + def capabilities(self) -> DestinationCapabilitiesContext: + return capabilities() + + @property + def client_class(self) -> t.Type["ClickhouseClient"]: + from dlt.destinations.impl.clickhouse.clickhouse import ClickhouseClient + + return ClickhouseClient + + def __init__( + self, + credentials: ClickhouseCredentials = None, + dataset_name: str = None, + default_schema_name: str = None, + destination_name: str = None, + environment: str = None, + **kwargs: t.Any, + ) -> None: + super().__init__( + credentials=credentials, + dataset_name=dataset_name, + default_schema_name=default_schema_name, + destination_name=destination_name, + environment=environment, + **kwargs, + ) From 81091c726492e37dd5b1dde58acae1c1936dba28 Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Tue, 12 Mar 2024 16:52:02 +0200 Subject: [PATCH 006/127] Wireframe ClickhouseSqlClient #1055 Signed-off-by: Marcel Coetzee --- .../impl/clickhouse/sql_client.py | 50 +++++++++++++++++++ 1 file changed, 50 insertions(+) diff --git a/dlt/destinations/impl/clickhouse/sql_client.py b/dlt/destinations/impl/clickhouse/sql_client.py index e69de29bb2..6636da385d 100644 --- a/dlt/destinations/impl/clickhouse/sql_client.py +++ b/dlt/destinations/impl/clickhouse/sql_client.py @@ -0,0 +1,50 @@ +from typing import AnyStr, Any, ContextManager, Optional, Sequence + +import clickhouse_driver +from clickhouse_driver.dbapi.extras import DictCursor + +from dlt.destinations.sql_client import DBApiCursorImpl, SqlClientBase +from dlt.destinations.typing import DBTransaction, DBApiCursor, TNativeConn + + +class ClickhouseDBApiCursorImpl(DBApiCursorImpl): + native_cursor: DictCursor + + +class ClickhouseSqlClient(SqlClientBase[clickhouse_driver.Client], DBTransaction): + def open_connection(self) -> TNativeConn: + pass + + def close_connection(self) -> None: + pass + + def begin_transaction(self) -> ContextManager[DBTransaction]: + pass + + @property + def native_connection(self) -> TNativeConn: + pass + + def execute_sql( + self, sql: AnyStr, *args: Any, **kwargs: Any + ) -> Optional[Sequence[Sequence[Any]]]: + pass + + def execute_query( + self, query: AnyStr, *args: Any, **kwargs: Any + ) -> ContextManager[DBApiCursor]: + pass + + def fully_qualified_dataset_name(self, escape: bool = True) -> str: + pass + + @staticmethod + def _make_database_exception(ex: Exception) -> Exception: + pass + + +class TransactionsNotImplementedError(NotImplementedError): + def __init__(self) -> None: + super().__init__( + "Clickhouse does not support transaction 
management." + ) From 7b7edffd76aa6c5eb595b9234e4cc78200013c11 Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Tue, 12 Mar 2024 19:23:54 +0200 Subject: [PATCH 007/127] Refactor Clickhouse SqlClient wireframing and update capabilities #1055 Signed-off-by: Marcel Coetzee --- dlt/destinations/impl/clickhouse/__init__.py | 14 ++--- .../impl/clickhouse/clickhouse.py | 34 ++++++++++++ .../impl/clickhouse/sql_client.py | 54 +++++++++++++------ 3 files changed, 81 insertions(+), 21 deletions(-) diff --git a/dlt/destinations/impl/clickhouse/__init__.py b/dlt/destinations/impl/clickhouse/__init__.py index 1c3d1bb5c1..d91e7c07f8 100644 --- a/dlt/destinations/impl/clickhouse/__init__.py +++ b/dlt/destinations/impl/clickhouse/__init__.py @@ -6,10 +6,10 @@ def capabilities() -> DestinationCapabilitiesContext: caps = DestinationCapabilitiesContext() - caps.preferred_loader_file_format = "jsonl" + caps.preferred_loader_file_format = "parquet" caps.supported_loader_file_formats = ["jsonl", "parquet", "insert_values"] - caps.preferred_staging_file_format = "jsonl" - caps.supported_staging_file_formats = ["parquet", "jsonl"] + caps.preferred_staging_file_format = "parquet" + caps.supported_staging_file_formats = ["jsonl", "parquet"] caps.escape_identifier = escape_clickhouse_identifier caps.escape_literal = escape_clickhouse_literal @@ -22,11 +22,13 @@ def capabilities() -> DestinationCapabilitiesContext: caps.is_max_query_length_in_bytes = True caps.max_query_length = 262144 - # Clickhouse has limited support for transactional semantics, especially for `ReplicatedMergeTree`, - # the default ClickHouse cloud engine. + # Clickhouse has limited support for transactional semantics, especially for `ReplicatedMergeTree`, the default ClickHouse Cloud engine. + # It does, however, provide atomicity for individual DDL operations like `ALTER TABLE`. # https://clickhouse-driver.readthedocs.io/en/latest/dbapi.html#clickhouse_driver.dbapi.connection.Connection.commit # https://clickhouse.com/docs/en/guides/developer/transactional#transactions-commit-and-rollback - caps.supports_ddl_transactions = False caps.supports_transactions = False + caps.supports_ddl_transactions = ( + True # Not as part of a transaction, but single atomic DDL operations are supported. 
+ ) return caps diff --git a/dlt/destinations/impl/clickhouse/clickhouse.py b/dlt/destinations/impl/clickhouse/clickhouse.py index cb9a72493b..473ee4b353 100644 --- a/dlt/destinations/impl/clickhouse/clickhouse.py +++ b/dlt/destinations/impl/clickhouse/clickhouse.py @@ -8,9 +8,43 @@ from dlt.destinations.impl.clickhouse.configuration import ClickhouseClientConfiguration from dlt.destinations.job_client_impl import SqlJobClientWithStaging from dlt.destinations.sql_client import SqlClientBase +from dlt.destinations.type_mapping import TypeMapper from dlt.destinations.typing import TNativeConn +class ClickhouseTypeMapper(TypeMapper): + sct_to_unbound_dbt = { + "complex": "JSON", + "text": "String", + "double": "Float64", + "bool": "Boolean", + "date": "Date", + "timestamp": "DateTime", + "bigint": "Int64", + "binary": "String", + "wei": "Decimal", + } + + sct_to_dbt = { + "decimal": "Decimal(%i,%i)", + "wei": "Decimal(%i,%i)", + } + + dbt_to_sct = { + "String": "text", + "Float64": "double", + "Boolean": "bool", + "Date": "date", + "DateTime": "timestamp", + "Int64": "bigint", + "JSON": "complex", + "Decimal": "decimal", + } + + def to_db_time_type(self, precision: Optional[int], table_format: TTableFormat = None) -> str: + return "DateTime" + + class ClickhouseClient(SqlJobClientWithStaging, SupportsStagingDestination): capabilities: ClassVar[DestinationCapabilitiesContext] = capabilities() diff --git a/dlt/destinations/impl/clickhouse/sql_client.py b/dlt/destinations/impl/clickhouse/sql_client.py index 6636da385d..4b37e013ca 100644 --- a/dlt/destinations/impl/clickhouse/sql_client.py +++ b/dlt/destinations/impl/clickhouse/sql_client.py @@ -1,28 +1,41 @@ -from typing import AnyStr, Any, ContextManager, Optional, Sequence +from contextlib import contextmanager +from typing import ( + Iterator, + AnyStr, + Any, + ContextManager, + Optional, + Sequence, + ClassVar, +) -import clickhouse_driver -from clickhouse_driver.dbapi.extras import DictCursor +import clickhouse_driver.dbapi as clickhouse_lib # type: ignore[import-untyped] +from clickhouse_driver.dbapi.connection import Connection # type: ignore[import-untyped] +from clickhouse_driver.dbapi.extras import DictCursor # type: ignore[import-untyped] -from dlt.destinations.sql_client import DBApiCursorImpl, SqlClientBase -from dlt.destinations.typing import DBTransaction, DBApiCursor, TNativeConn +from dlt.common.destination import DestinationCapabilitiesContext +from dlt.common.runtime import logger +from dlt.destinations.impl.clickhouse import capabilities +from dlt.destinations.sql_client import DBApiCursorImpl, SqlClientBase, raise_database_error +from dlt.destinations.typing import DBTransaction, DBApiCursor, TNativeConn, DBApi class ClickhouseDBApiCursorImpl(DBApiCursorImpl): native_cursor: DictCursor -class ClickhouseSqlClient(SqlClientBase[clickhouse_driver.Client], DBTransaction): - def open_connection(self) -> TNativeConn: - pass +class ClickhouseSqlClient(SqlClientBase[Connection], DBTransaction): + dbapi: ClassVar[DBApi] = clickhouse_lib + capabilities: ClassVar[DestinationCapabilitiesContext] = capabilities() - def close_connection(self) -> None: + @property + def native_connection(self) -> TNativeConn: # type: ignore pass - def begin_transaction(self) -> ContextManager[DBTransaction]: + def open_connection(self) -> Connection: pass - @property - def native_connection(self) -> TNativeConn: + def close_connection(self) -> None: pass def execute_sql( @@ -42,9 +55,20 @@ def fully_qualified_dataset_name(self, escape: bool = True) -> 
str: def _make_database_exception(ex: Exception) -> Exception: pass + @contextmanager + @raise_database_error + def begin_transaction(self) -> Iterator[DBTransaction]: + logger.warning( + "Clickhouse does not support transactions! Each SQL statement is auto-committed" + " separately." + ) + yield self + + @raise_database_error + def rollback_transaction(self) -> None: + raise NotImplementedError("You cannot rollback Clickhouse SQL statements.") + class TransactionsNotImplementedError(NotImplementedError): def __init__(self) -> None: - super().__init__( - "Clickhouse does not support transaction management." - ) + super().__init__("Clickhouse does not support transaction management.") From 5f55bab3b2a8335aa2c8b55ddcc2b27a822d4b37 Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Wed, 13 Mar 2024 14:42:52 +0200 Subject: [PATCH 008/127] Update error messages and transaction capability in Clickhouse Signed-off-by: Marcel Coetzee --- dlt/destinations/impl/clickhouse/__init__.py | 4 +--- dlt/destinations/impl/clickhouse/sql_client.py | 9 +++++---- 2 files changed, 6 insertions(+), 7 deletions(-) diff --git a/dlt/destinations/impl/clickhouse/__init__.py b/dlt/destinations/impl/clickhouse/__init__.py index d91e7c07f8..c951579ac0 100644 --- a/dlt/destinations/impl/clickhouse/__init__.py +++ b/dlt/destinations/impl/clickhouse/__init__.py @@ -27,8 +27,6 @@ def capabilities() -> DestinationCapabilitiesContext: # https://clickhouse-driver.readthedocs.io/en/latest/dbapi.html#clickhouse_driver.dbapi.connection.Connection.commit # https://clickhouse.com/docs/en/guides/developer/transactional#transactions-commit-and-rollback caps.supports_transactions = False - caps.supports_ddl_transactions = ( - True # Not as part of a transaction, but single atomic DDL operations are supported. - ) + caps.supports_ddl_transactions = False return caps diff --git a/dlt/destinations/impl/clickhouse/sql_client.py b/dlt/destinations/impl/clickhouse/sql_client.py index 4b37e013ca..7fd0e96888 100644 --- a/dlt/destinations/impl/clickhouse/sql_client.py +++ b/dlt/destinations/impl/clickhouse/sql_client.py @@ -59,16 +59,17 @@ def _make_database_exception(ex: Exception) -> Exception: @raise_database_error def begin_transaction(self) -> Iterator[DBTransaction]: logger.warning( - "Clickhouse does not support transactions! Each SQL statement is auto-committed" - " separately." + "Clickhouse does not support transactions! Each statement is auto-committed separately." ) yield self @raise_database_error def rollback_transaction(self) -> None: - raise NotImplementedError("You cannot rollback Clickhouse SQL statements.") + raise NotImplementedError("You cannot rollback Clickhouse transactions.") class TransactionsNotImplementedError(NotImplementedError): def __init__(self) -> None: - super().__init__("Clickhouse does not support transaction management.") + super().__init__( + "Clickhouse does not support transactions! Each statement is auto-committed separately." 
+ ) From c9fb02e93f521a80103c892a2da51d2c03b8674d Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Wed, 13 Mar 2024 22:34:39 +0200 Subject: [PATCH 009/127] Update Clickhouse configuration and factory Signed-off-by: Marcel Coetzee --- .../impl/clickhouse/configuration.py | 12 ++++++--- dlt/destinations/impl/clickhouse/factory.py | 25 ++++++++++++++----- 2 files changed, 28 insertions(+), 9 deletions(-) diff --git a/dlt/destinations/impl/clickhouse/configuration.py b/dlt/destinations/impl/clickhouse/configuration.py index 1f25040f99..bedd41bc2e 100644 --- a/dlt/destinations/impl/clickhouse/configuration.py +++ b/dlt/destinations/impl/clickhouse/configuration.py @@ -59,7 +59,14 @@ class ClickhouseClientConfiguration(DestinationClientDwhWithStagingConfiguration destination_type: Final[str] = "clickhouse" # type: ignore[misc] credentials: ClickhouseCredentials - create_indexes: bool = True + # Primary key columns are used to build a sparse primary index which allows for efficient data retrieval, + # but they do not enforce uniqueness constraints. It permits duplicate values even for the primary key + # columns within the same granule. + # See: https://clickhouse.com/docs/en/optimize/sparse-primary-indexes + create_indexes: bool = False + """Whether `primary_key` and `unique` column hints are applied.""" + + __config_gen_annotations__: ClassVar[List[str]] = ["create_indexes"] def fingerprint(self) -> str: """Returns a fingerprint of host part of a connection string.""" @@ -74,14 +81,13 @@ def __init__( *, credentials: ClickhouseCredentials = None, dataset_name: str = None, - default_schema_name: str = None, + create_indexes: bool = False, destination_name: str = None, environment: str = None ) -> None: super().__init__( credentials=credentials, dataset_name=dataset_name, - default_schema_name=default_schema_name, destination_name=destination_name, environment=environment, ) diff --git a/dlt/destinations/impl/clickhouse/factory.py b/dlt/destinations/impl/clickhouse/factory.py index 0734abdf56..2df9fa94a6 100644 --- a/dlt/destinations/impl/clickhouse/factory.py +++ b/dlt/destinations/impl/clickhouse/factory.py @@ -1,5 +1,7 @@ import typing as t +from clickhouse_driver.dbapi import Connection + from dlt.common.destination import Destination, DestinationCapabilitiesContext from dlt.destinations.impl.clickhouse import capabilities from dlt.destinations.impl.clickhouse.configuration import ( @@ -20,25 +22,36 @@ def capabilities(self) -> DestinationCapabilitiesContext: return capabilities() @property - def client_class(self) -> t.Type["ClickhouseClient"]: + def client_class(self) -> t.Type[ClickhouseClient]: from dlt.destinations.impl.clickhouse.clickhouse import ClickhouseClient return ClickhouseClient def __init__( self, - credentials: ClickhouseCredentials = None, - dataset_name: str = None, - default_schema_name: str = None, + credentials: t.Union[ClickhouseCredentials, str, t.Dict[str, t.Any], Connection] = None, destination_name: str = None, environment: str = None, + create_indexes: bool = False, **kwargs: t.Any, ) -> None: + """Configure the Clickhouse destination to use in a pipeline. + + All arguments provided here supersede other configuration sources such as environment + variables and dlt config files. + + Args: + credentials: Credentials to connect to the clickhouse database. + Can be an instance of `ClickhouseCredentials`, or a connection string + in the format `clickhouse://user:password@host:port/database`. 
+ create_indexes: Maps directly to the `create_indexes` attribute of the + `ClickhouseClientConfiguration` object. + **kwargs: Additional arguments passed to the destination config. + """ super().__init__( credentials=credentials, - dataset_name=dataset_name, - default_schema_name=default_schema_name, destination_name=destination_name, environment=environment, + create_indexes=create_indexes, **kwargs, ) From f53ca527e97266a2a398dfc50aeaa6a37ddba5ba Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Thu, 14 Mar 2024 12:35:23 +0200 Subject: [PATCH 010/127] Update Signed-off-by: Marcel Coetzee --- dlt/common/destination/reference.py | 33 +++++++++---------- dlt/destinations/impl/clickhouse/__init__.py | 7 +++- .../impl/clickhouse/clickhouse.py | 1 + dlt/destinations/impl/clickhouse/factory.py | 5 +++ 4 files changed, 27 insertions(+), 19 deletions(-) diff --git a/dlt/common/destination/reference.py b/dlt/common/destination/reference.py index 6a945e5f1f..b6f06024cc 100644 --- a/dlt/common/destination/reference.py +++ b/dlt/common/destination/reference.py @@ -1,4 +1,7 @@ +import datetime # noqa: 251 +import inspect from abc import ABC, abstractmethod +from copy import deepcopy from importlib import import_module from types import TracebackType from typing import ( @@ -20,11 +23,13 @@ Generic, Final, ) -import datetime # noqa: 251 -from copy import deepcopy -import inspect from dlt.common import logger +from dlt.common.configuration import configspec, resolve_configuration, known_sections +from dlt.common.configuration.accessors import config +from dlt.common.configuration.specs import BaseConfiguration, CredentialsConfiguration +from dlt.common.configuration.specs import GcpCredentials, AwsCredentialsWithoutDefaults +from dlt.common.destination.capabilities import DestinationCapabilitiesContext from dlt.common.exceptions import ( IdentifierTooLongException, InvalidDestinationReference, @@ -32,6 +37,7 @@ ) from dlt.common.schema import Schema, TTableSchema, TSchemaTables from dlt.common.schema.exceptions import SchemaException +from dlt.common.schema.exceptions import UnknownTableException from dlt.common.schema.utils import ( get_write_disposition, get_table_format, @@ -39,15 +45,9 @@ has_column_with_prop, get_first_column_name_with_prop, ) -from dlt.common.configuration import configspec, resolve_configuration, known_sections -from dlt.common.configuration.specs import BaseConfiguration, CredentialsConfiguration -from dlt.common.configuration.accessors import config -from dlt.common.destination.capabilities import DestinationCapabilitiesContext from dlt.common.schema.utils import is_complete_column -from dlt.common.schema.exceptions import UnknownTableException from dlt.common.storages import FileStorage from dlt.common.storages.load_storage import ParsedLoadJobFileName -from dlt.common.configuration.specs import GcpCredentials, AwsCredentialsWithoutDefaults TLoaderReplaceStrategy = Literal["truncate-and-insert", "insert-from-staging", "staging-optimized"] @@ -131,9 +131,7 @@ def normalize_dataset_name(self, schema: Schema) -> str: ) return ( - self.dataset_name - if not self.dataset_name - else schema.naming.normalize_table_identifier(self.dataset_name) + schema.naming.normalize_table_identifier(self.dataset_name) if self.dataset_name else self.dataset_name ) if TYPE_CHECKING: @@ -420,8 +418,8 @@ def prepare_load_table( if "table_format" not in table: table["table_format"] = get_table_format(self.schema.tables, table_name) return table - except KeyError: - raise 
UnknownTableException(table_name) + except KeyError as e: + raise UnknownTableException(table_name) from e class WithStateSync(ABC): @@ -504,7 +502,7 @@ def destination_name(self) -> str: @property def destination_type(self) -> str: - full_path = self.__class__.__module__ + "." + self.__class__.__qualname__ + full_path = f"{self.__class__.__module__}.{self.__class__.__qualname__}" return Destination.normalize_type(full_path) @property @@ -519,13 +517,12 @@ def client_class(self) -> Type[TDestinationClient]: def configuration(self, initial_config: TDestinationConfig) -> TDestinationConfig: """Get a fully resolved destination config from the initial config""" - config = resolve_configuration( + return resolve_configuration( initial_config, sections=(known_sections.DESTINATION, self.destination_name), # Already populated values will supersede resolved env config explicit_value=self.config_params, ) - return config @staticmethod def to_name(ref: TDestinationReferenceArg) -> str: @@ -541,7 +538,7 @@ def to_name(ref: TDestinationReferenceArg) -> str: def normalize_type(destination_type: str) -> str: """Normalizes destination type string into a canonical form. Assumes that type names without dots correspond to build in destinations.""" if "." not in destination_type: - destination_type = "dlt.destinations." + destination_type + destination_type = f"dlt.destinations.{destination_type}" # the next two lines shorten the dlt internal destination paths to dlt.destinations. name = Destination.to_name(destination_type) destination_type = destination_type.replace( diff --git a/dlt/destinations/impl/clickhouse/__init__.py b/dlt/destinations/impl/clickhouse/__init__.py index c951579ac0..35ef6dad35 100644 --- a/dlt/destinations/impl/clickhouse/__init__.py +++ b/dlt/destinations/impl/clickhouse/__init__.py @@ -14,9 +14,12 @@ def capabilities() -> DestinationCapabilitiesContext: caps.escape_identifier = escape_clickhouse_identifier caps.escape_literal = escape_clickhouse_literal - # Muse use 'Decimal128' with these defaults. + caps.schema_supports_numeric_precision = True + # Use 'Decimal128' with these defaults. # https://clickhouse.com/docs/en/sql-reference/data-types/decimal caps.decimal_precision = (DEFAULT_NUMERIC_PRECISION, DEFAULT_NUMERIC_SCALE) + # Use 'Decimal256' with these defaults. 
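+    # ClickHouse's Decimal256 supports a precision of up to 76 digits.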
+ caps.wei_precision = (76, 0) # https://clickhouse.com/docs/en/operations/settings/settings#max_query_size caps.is_max_query_length_in_bytes = True @@ -29,4 +32,6 @@ def capabilities() -> DestinationCapabilitiesContext: caps.supports_transactions = False caps.supports_ddl_transactions = False + caps.supports_truncate_command = True + return caps diff --git a/dlt/destinations/impl/clickhouse/clickhouse.py b/dlt/destinations/impl/clickhouse/clickhouse.py index 473ee4b353..21c9eefe63 100644 --- a/dlt/destinations/impl/clickhouse/clickhouse.py +++ b/dlt/destinations/impl/clickhouse/clickhouse.py @@ -28,6 +28,7 @@ class ClickhouseTypeMapper(TypeMapper): sct_to_dbt = { "decimal": "Decimal(%i,%i)", "wei": "Decimal(%i,%i)", + "timestamp": "DateTime(%i)", } dbt_to_sct = { diff --git a/dlt/destinations/impl/clickhouse/factory.py b/dlt/destinations/impl/clickhouse/factory.py index 2df9fa94a6..6f59557d71 100644 --- a/dlt/destinations/impl/clickhouse/factory.py +++ b/dlt/destinations/impl/clickhouse/factory.py @@ -55,3 +55,8 @@ def __init__( create_indexes=create_indexes, **kwargs, ) + + + + + From c1f106bc673aeec93dbd1f63e7a6837e9fc20091 Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Thu, 14 Mar 2024 19:26:36 +0200 Subject: [PATCH 011/127] Update identifier escaping logic #1055 Signed-off-by: Marcel Coetzee --- dlt/common/data_writers/escape.py | 41 +------------------- dlt/destinations/impl/clickhouse/__init__.py | 15 ++++--- 2 files changed, 9 insertions(+), 47 deletions(-) diff --git a/dlt/common/data_writers/escape.py b/dlt/common/data_writers/escape.py index 190ce4b2d7..a606906774 100644 --- a/dlt/common/data_writers/escape.py +++ b/dlt/common/data_writers/escape.py @@ -137,42 +137,5 @@ def escape_databricks_literal(v: Any) -> Any: return "NULL" if v is None else str(v) -# https://github.com/ClickHouse/ClickHouse/blob/master/docs/en/sql-reference/syntax.md#string -CLICKHOUSE_ESCAPE_DICT = { - "'": "''", - "\\": "\\\\", - "\n": "\\n", - "\t": "\\t", - "\b": "\\b", - "\f": "\\f", - "\r": "\\r", - "\0": "\\0", - "\a": "\\a", - "\v": "\\v", -} - -CLICKHOUSE_ESCAPE_RE = _make_sql_escape_re(CLICKHOUSE_ESCAPE_DICT) - - -def escape_clickhouse_literal(v: Any) -> Any: - if isinstance(v, str): - return _escape_extended( - v, prefix="'", escape_dict=CLICKHOUSE_ESCAPE_DICT, escape_re=CLICKHOUSE_ESCAPE_RE - ) - if isinstance(v, (datetime, date, time)): - return f"'{v.isoformat()}'" - if isinstance(v, (list, dict)): - return _escape_extended( - json.dumps(v), - prefix="'", - escape_dict=CLICKHOUSE_ESCAPE_DICT, - escape_re=CLICKHOUSE_ESCAPE_RE, - ) - if isinstance(v, bytes): - return f"'{v.hex()}'" - return "NULL" if v is None else str(v) - - -def escape_clickhouse_identifier(v: str, quote_char: str = "`") -> str: - quote_char = quote_char if quote_char in {'"', "`"} else "`" - return quote_char + v.replace(quote_char, quote_char * 2).replace("\\", "\\\\") + quote_char +def escape_clickhouse_identifier(v: str) -> str: + return '`' + v.replace('`', '``').replace("\\", "\\\\") + '"' diff --git a/dlt/destinations/impl/clickhouse/__init__.py b/dlt/destinations/impl/clickhouse/__init__.py index 35ef6dad35..236e83ef54 100644 --- a/dlt/destinations/impl/clickhouse/__init__.py +++ b/dlt/destinations/impl/clickhouse/__init__.py @@ -1,18 +1,17 @@ from dlt.common.arithmetics import DEFAULT_NUMERIC_PRECISION, DEFAULT_NUMERIC_SCALE -from dlt.common.data_writers.escape import escape_clickhouse_identifier, escape_clickhouse_literal +from dlt.common.data_writers.escape import escape_clickhouse_identifier from 
dlt.common.destination import DestinationCapabilitiesContext def capabilities() -> DestinationCapabilitiesContext: caps = DestinationCapabilitiesContext() - caps.preferred_loader_file_format = "parquet" - caps.supported_loader_file_formats = ["jsonl", "parquet", "insert_values"] - caps.preferred_staging_file_format = "parquet" - caps.supported_staging_file_formats = ["jsonl", "parquet"] + caps.preferred_loader_file_format = "jsonl" + caps.supported_loader_file_formats = ["jsonl", "parquet", "arrow"] + caps.preferred_staging_file_format = "jsonl" + caps.supported_staging_file_formats = ["jsonl", "parquet", "arrow"] caps.escape_identifier = escape_clickhouse_identifier - caps.escape_literal = escape_clickhouse_literal caps.schema_supports_numeric_precision = True # Use 'Decimal128' with these defaults. @@ -25,8 +24,8 @@ def capabilities() -> DestinationCapabilitiesContext: caps.is_max_query_length_in_bytes = True caps.max_query_length = 262144 - # Clickhouse has limited support for transactional semantics, especially for `ReplicatedMergeTree`, the default ClickHouse Cloud engine. - # It does, however, provide atomicity for individual DDL operations like `ALTER TABLE`. + # Clickhouse has limited support for transactional semantics, especially for `ReplicatedMergeTree`, + # the default ClickHouse Cloud engine. It does, however, provide atomicity for individual DDL operations like `ALTER TABLE`. # https://clickhouse-driver.readthedocs.io/en/latest/dbapi.html#clickhouse_driver.dbapi.connection.Connection.commit # https://clickhouse.com/docs/en/guides/developer/transactional#transactions-commit-and-rollback caps.supports_transactions = False From 6fc750fba0356e3b6190f8aa0da0914f919c8d1d Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Fri, 15 Mar 2024 18:21:18 +0200 Subject: [PATCH 012/127] Refactor ClickhouseSqlClient for better error handling #1055 Signed-off-by: Marcel Coetzee --- .../impl/clickhouse/sql_client.py | 160 +++++++++++++----- 1 file changed, 122 insertions(+), 38 deletions(-) diff --git a/dlt/destinations/impl/clickhouse/sql_client.py b/dlt/destinations/impl/clickhouse/sql_client.py index 7fd0e96888..84d3000832 100644 --- a/dlt/destinations/impl/clickhouse/sql_client.py +++ b/dlt/destinations/impl/clickhouse/sql_client.py @@ -3,73 +3,157 @@ Iterator, AnyStr, Any, - ContextManager, Optional, Sequence, ClassVar, + Union, ) -import clickhouse_driver.dbapi as clickhouse_lib # type: ignore[import-untyped] -from clickhouse_driver.dbapi.connection import Connection # type: ignore[import-untyped] +import clickhouse_driver # type: ignore[import-untyped] +import clickhouse_driver.errors # type: ignore[import-untyped] +from clickhouse_driver.dbapi import Connection # type: ignore[import-untyped] from clickhouse_driver.dbapi.extras import DictCursor # type: ignore[import-untyped] from dlt.common.destination import DestinationCapabilitiesContext from dlt.common.runtime import logger +from dlt.destinations.exceptions import ( + DatabaseUndefinedRelation, + DatabaseTransientException, + DatabaseTerminalException, +) from dlt.destinations.impl.clickhouse import capabilities -from dlt.destinations.sql_client import DBApiCursorImpl, SqlClientBase, raise_database_error -from dlt.destinations.typing import DBTransaction, DBApiCursor, TNativeConn, DBApi +from dlt.destinations.impl.clickhouse.configuration import ClickhouseCredentials +from dlt.destinations.sql_client import ( + DBApiCursorImpl, + SqlClientBase, + raise_database_error, + raise_open_connection_error, +) +from dlt.destinations.typing 
import DBTransaction, DBApi, DBApiCursor + + +TRANSACTIONS_UNSUPPORTED_WARNING_MESSAGE = ( + "Clickhouse does not support transactions! Each statement is auto-committed separately." +) class ClickhouseDBApiCursorImpl(DBApiCursorImpl): native_cursor: DictCursor -class ClickhouseSqlClient(SqlClientBase[Connection], DBTransaction): - dbapi: ClassVar[DBApi] = clickhouse_lib +class ClickhouseSqlClient( + SqlClientBase[clickhouse_driver.dbapi.connection.Connection], DBTransaction +): + dbapi: ClassVar[DBApi] = clickhouse_driver.dbapi.connection.Connection capabilities: ClassVar[DestinationCapabilitiesContext] = capabilities() - @property - def native_connection(self) -> TNativeConn: # type: ignore - pass + def __init__(self, dataset_name: str, credentials: ClickhouseCredentials) -> None: + super().__init__(credentials.database, dataset_name) + self._conn: clickhouse_driver.dbapi.connection = None + self.credentials = credentials + self.database_name = credentials.database - def open_connection(self) -> Connection: - pass + def open_connection(self) -> clickhouse_driver.dbapi.connection.Connection: + self._conn = clickhouse_driver.connect(dsn=self.credentials.to_native_representation()) + # TODO: Set timezone to UTC explicitly in each query. + # https://github.com/ClickHouse/ClickHouse/issues/699 + return self._conn + @raise_open_connection_error def close_connection(self) -> None: - pass - - def execute_sql( - self, sql: AnyStr, *args: Any, **kwargs: Any - ) -> Optional[Sequence[Sequence[Any]]]: - pass - - def execute_query( - self, query: AnyStr, *args: Any, **kwargs: Any - ) -> ContextManager[DBApiCursor]: - pass - - def fully_qualified_dataset_name(self, escape: bool = True) -> str: - pass - - @staticmethod - def _make_database_exception(ex: Exception) -> Exception: - pass + if self._conn: + self._conn.close() + self._conn = None @contextmanager @raise_database_error def begin_transaction(self) -> Iterator[DBTransaction]: - logger.warning( - "Clickhouse does not support transactions! Each statement is auto-committed separately." - ) yield self + logger.warning(TRANSACTIONS_UNSUPPORTED_WARNING_MESSAGE) + + @raise_database_error + def commit_transaction(self) -> None: + logger.warning(TRANSACTIONS_UNSUPPORTED_WARNING_MESSAGE) + self._conn.commit() @raise_database_error def rollback_transaction(self) -> None: - raise NotImplementedError("You cannot rollback Clickhouse transactions.") + logger.warning(TRANSACTIONS_UNSUPPORTED_WARNING_MESSAGE) + self._conn.rollback() + + @property + def native_connection(self) -> clickhouse_driver.dbapi.connection.Connection: + return self._conn + + def execute_sql( + self, sql: AnyStr, *args: Any, **kwargs: Any + ) -> Optional[Sequence[Sequence[Any]]]: + with self.execute_query(sql, *args, **kwargs) as curr: + return None if curr.description is None else curr.fetchall() + @contextmanager + @raise_database_error + def execute_query(self, query: AnyStr, *args: Any, **kwargs: Any) -> Iterator[DBApiCursor]: + with self._conn.cursor() as curr: + try: + curr.execute(query, args or (kwargs or None)) + yield ClickhouseDBApiCursorImpl(curr) # type: ignore[abstract] + except clickhouse_driver.dbapi.Error: + self.close_connection() + self.open_connection() + raise -class TransactionsNotImplementedError(NotImplementedError): - def __init__(self) -> None: - super().__init__( - "Clickhouse does not support transactions! Each statement is auto-committed separately." 
+ def fully_qualified_dataset_name(self, escape: bool = True) -> str: + database_name = ( + self.capabilities.escape_identifier(self.database_name) + if escape + else self.database_name + ) + dataset_name = ( + self.capabilities.escape_identifier(self.dataset_name) if escape else self.dataset_name ) + return f"{database_name}.{dataset_name}" + + @classmethod + def _make_database_exception(cls, ex: Exception) -> Exception: # type: ignore[return] + if isinstance(ex, clickhouse_driver.dbapi.errors.OperationalError): + if "Code: 57." in str(ex) or "Code: 82." in str(ex): + raise DatabaseTerminalException(ex) + elif "Code: 60." in str(ex) or "Code: 81." in str(ex): + raise DatabaseUndefinedRelation(ex) + elif isinstance( + ex, + ( + clickhouse_driver.dbapi.errors.OperationalError, + clickhouse_driver.dbapi.errors.InternalError, + ), + ): + if term := cls._maybe_make_terminal_exception_from_data_error(ex): + return term + else: + return DatabaseTransientException(ex) + elif isinstance( + ex, + ( + clickhouse_driver.dbapi.errors.DataError, + clickhouse_driver.dbapi.errors.ProgrammingError, + clickhouse_driver.dbapi.errors.IntegrityError, + ), + ): + return DatabaseTerminalException(ex) + elif cls.is_dbapi_exception(ex): + return DatabaseTransientException(ex) + else: + return ex + + @staticmethod + def _maybe_make_terminal_exception_from_data_error( + ex: Union[ + clickhouse_driver.dbapi.errors.DataError, clickhouse_driver.dbapi.errors.InternalError + ] + ) -> Optional[Exception]: + return None + + @staticmethod + def is_dbapi_exception(ex: Exception) -> bool: + return isinstance(ex, clickhouse_driver.dbapi.Error) From dbad9b1d81cd012cf5ef8478f2e748c5672caec6 Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Sat, 16 Mar 2024 21:46:19 +0200 Subject: [PATCH 013/127] Refine clickhouse destination and basic adapter #1055 Signed-off-by: Marcel Coetzee --- .../impl/clickhouse/clickhouse.py | 111 ++++++++++++++++-- .../impl/clickhouse/clickhouse_adapter.py | 59 ++++++++++ .../impl/clickhouse/configuration.py | 3 +- .../impl/clickhouse/sql_client.py | 10 +- 4 files changed, 166 insertions(+), 17 deletions(-) create mode 100644 dlt/destinations/impl/clickhouse/clickhouse_adapter.py diff --git a/dlt/destinations/impl/clickhouse/clickhouse.py b/dlt/destinations/impl/clickhouse/clickhouse.py index 21c9eefe63..b5d9624b97 100644 --- a/dlt/destinations/impl/clickhouse/clickhouse.py +++ b/dlt/destinations/impl/clickhouse/clickhouse.py @@ -1,15 +1,37 @@ -from typing import ClassVar, Optional +from copy import deepcopy +from typing import ClassVar, Optional, Dict, List, Sequence from dlt.common.destination import DestinationCapabilitiesContext from dlt.common.destination.reference import SupportsStagingDestination from dlt.common.schema import Schema, TColumnSchema -from dlt.common.schema.typing import TColumnType, TTableFormat +from dlt.common.schema.typing import TTableFormat, TTableSchema, TColumnHint, TColumnType from dlt.destinations.impl.clickhouse import capabilities -from dlt.destinations.impl.clickhouse.configuration import ClickhouseClientConfiguration -from dlt.destinations.job_client_impl import SqlJobClientWithStaging -from dlt.destinations.sql_client import SqlClientBase +from dlt.destinations.impl.clickhouse.clickhouse_adapter import ( + TTableEngineType, + TABLE_ENGINE_TYPE_HINT, +) +from dlt.destinations.impl.clickhouse.configuration import ( + ClickhouseClientConfiguration, + ClickhouseCredentials, +) +from dlt.destinations.impl.clickhouse.sql_client import ClickhouseSqlClient +from 
dlt.destinations.job_client_impl import ( + SqlJobClientWithStaging, + CopyRemoteFileLoadJob, + SqlJobClientBase, +) +from dlt.destinations.sql_jobs import SqlMergeJob from dlt.destinations.type_mapping import TypeMapper -from dlt.destinations.typing import TNativeConn + + +HINT_TO_CLICKHOUSE_ATTR: Dict[TColumnHint, str] = { + "primary_key": "PRIMARY KEY", +} + +TABLE_ENGINE_TYPE_TO_CLICKHOUSE_ATTR: Dict[TTableEngineType, str] = { + "merge_tree": "MergeTree", + "replicated_merge_tree": "ReplicatedMergeTree", +} class ClickhouseTypeMapper(TypeMapper): @@ -46,6 +68,25 @@ def to_db_time_type(self, precision: Optional[int], table_format: TTableFormat = return "DateTime" +class ClickhouseCopyFileLoadJob(CopyRemoteFileLoadJob): + def __init__( + self, + table: TTableSchema, + file_path: str, + sql_client: ClickhouseSqlClient, + staging_credentials: Optional[ClickhouseCredentials] = None, + staging_iam_role: str = None, + ) -> None: + self._staging_iam_role = staging_iam_role + super().__init__(table, file_path, sql_client, staging_credentials) + + def exception(self) -> str: + pass + + +class ClickhouseMergeJob(SqlMergeJob): ... + + class ClickhouseClient(SqlJobClientWithStaging, SupportsStagingDestination): capabilities: ClassVar[DestinationCapabilitiesContext] = capabilities() @@ -53,15 +94,61 @@ def __init__( self, schema: Schema, config: ClickhouseClientConfiguration, - sql_client: SqlClientBase[TNativeConn], ) -> None: - super().__init__(schema, config, sql_client) - ... + self.config: ClickhouseClientConfiguration = config + # TODO: There are no schemas in Clickhouse. No point in having schemas, only dataset names and table names for example "dataset1_mytable". + self.sql_client = ClickhouseSqlClient( + self.config.normalize_dataset_name(self.schema), self.config.credentials + ) + super().__init__(schema, self.config, self.sql_client) + self.active_hints = deepcopy(HINT_TO_CLICKHOUSE_ATTR) if self.config.create_indexes else {} + self.type_mapper = ClickhouseTypeMapper(self.capabilities) + + def _get_table_update_sql( + self, table_name: str, new_columns: Sequence[TColumnSchema], generate_alter: bool + ) -> List[str]: + table: TTableSchema = self.prepare_load_table(table_name, self.in_staging_mode) + sql = SqlJobClientBase._get_table_update_sql(self, table_name, new_columns, generate_alter) + + if generate_alter: + return sql + + # TODO: Remove `unique` and `primary_key` default implementations. + if primary_key_list := [ + self.capabilities.escape_identifier(c["name"]) + for c in new_columns + if c.get("primary_key") + ]: + sql[0] += "\nPRIMARY KEY (" + ", ".join(primary_key_list) + ")" + else: + sql[0] += "\nPRIMARY KEY tuple()" + + # Default to 'ReplicatedMergeTree' if user didn't explicitly set a table engine hint. + # 'ReplicatedMergeTree' is the only supported engine for Clickhouse Cloud. + sql[0] = f"{sql[0]}\nENGINE = {table.get(TABLE_ENGINE_TYPE_HINT, 'replicated_merge_tree')}" + + return sql def _get_column_def_sql(self, c: TColumnSchema, table_format: TTableFormat = None) -> str: - pass + # The primary key definition is defined outside column specification. + hints_str = " ".join( + self.active_hints.get(hint, "") + for hint in self.active_hints.keys() + if c.get(hint, False) is True and hint != "primary_key" + ) + return ( + f"{self.capabilities.escape_identifier(c['name'])} " + f"{self.type_mapper.to_db_type(c)} " + f"{hints_str} " + f"{self._gen_not_null(c.get('nullable', True))}" + ) + + # Clickhouse fields are not nullable by default. 
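+    # The "NULL" modifier makes a column Nullable(T) in ClickHouse, while "NOT NULL"
+    # keeps the engine's default non-nullable behaviour.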
+ @staticmethod + def _gen_not_null(v: bool) -> str: + return "NULL" if v else "NOT NULL" def _from_db_type( - self, db_type: str, precision: Optional[int], scale: Optional[int] + self, ch_t: str, precision: Optional[int], scale: Optional[int] ) -> TColumnType: - pass + return self.type_mapper.from_db_type(ch_t, precision, scale) diff --git a/dlt/destinations/impl/clickhouse/clickhouse_adapter.py b/dlt/destinations/impl/clickhouse/clickhouse_adapter.py new file mode 100644 index 0000000000..a2e8d39c03 --- /dev/null +++ b/dlt/destinations/impl/clickhouse/clickhouse_adapter.py @@ -0,0 +1,59 @@ +from typing import Any, Literal, Set, get_args, Dict + +from dlt.destinations.utils import ensure_resource +from dlt.extract import DltResource +from dlt.extract.items import TTableHintTemplate + + +TTableEngineType = Literal["merge_tree", "replicated_merge_tree"] + +""" +The table engine (type of table) determines: + +- How and where data is stored, where to write it to, and where to read it from. +- Which queries are supported, and how. +- Concurrent data access. +- Use of indexes, if present. +- Whether multithread request execution is possible. +- Data replication parameters. + +See https://clickhouse.com/docs/en/engines/table-engines. +""" +TABLE_ENGINE_TYPES: Set[TTableEngineType] = set(get_args(TTableEngineType)) +TABLE_ENGINE_TYPE_HINT: Literal["x-table-engine-type"] = "x-table-engine-type" + +def clickhouse_adapter(data: Any, table_engine_type: TTableEngineType = None) -> DltResource: + """Prepares data for the Clickhouse destination by specifying which table engine type + that should be used. + + Args: + data (Any): The data to be transformed. It can be raw data or an instance + of DltResource. If raw data, the function wraps it into a DltResource + object. + table_engine_type (TTableEngineType, optional): The table index type used when creating + the Synapse table. + + Returns: + DltResource: A resource with applied Synapse-specific hints. + + Raises: + ValueError: If input for `table_engine_type` is invalid. + + Examples: + >>> data = [{"name": "Alice", "description": "Software Developer"}] + >>> clickhouse_adapter(data, table_engine_type="merge_tree") + [DltResource with hints applied] + """ + resource = ensure_resource(data) + + additional_table_hints: Dict[str, TTableHintTemplate[Any]] = {} + if table_engine_type is not None: + if table_engine_type not in TABLE_ENGINE_TYPES: + allowed_types = ", ".join(TABLE_ENGINE_TYPES) + raise ValueError( + f"Table engine type {table_engine_type} is invalid. Allowed table engine types are:" + f" {allowed_types}." + ) + additional_table_hints[TABLE_ENGINE_TYPE_HINT] = table_engine_type + resource.apply_hints(additional_table_hints=additional_table_hints) + return resource diff --git a/dlt/destinations/impl/clickhouse/configuration.py b/dlt/destinations/impl/clickhouse/configuration.py index bedd41bc2e..359f08e47e 100644 --- a/dlt/destinations/impl/clickhouse/configuration.py +++ b/dlt/destinations/impl/clickhouse/configuration.py @@ -64,7 +64,8 @@ class ClickhouseClientConfiguration(DestinationClientDwhWithStagingConfiguration # columns within the same granule. # See: https://clickhouse.com/docs/en/optimize/sparse-primary-indexes create_indexes: bool = False - """Whether `primary_key` and `unique` column hints are applied.""" + """Whether `primary_key` column hint is applied. 
Note that Clickhouse has no unique constraint, + and primary keys don't guarantee uniqueness.""" __config_gen_annotations__: ClassVar[List[str]] = ["create_indexes"] diff --git a/dlt/destinations/impl/clickhouse/sql_client.py b/dlt/destinations/impl/clickhouse/sql_client.py index 84d3000832..1323dc2da9 100644 --- a/dlt/destinations/impl/clickhouse/sql_client.py +++ b/dlt/destinations/impl/clickhouse/sql_client.py @@ -93,11 +93,13 @@ def execute_sql( @contextmanager @raise_database_error - def execute_query(self, query: AnyStr, *args: Any, **kwargs: Any) -> Iterator[DBApiCursor]: - with self._conn.cursor() as curr: + def execute_query(self, query: AnyStr, *args: Any, **kwargs: Any) -> Iterator[ClickhouseDBApiCursorImpl]: + cur: clickhouse_driver.dbapi.connection.Cursor + with self._conn.cursor() as cur: try: - curr.execute(query, args or (kwargs or None)) - yield ClickhouseDBApiCursorImpl(curr) # type: ignore[abstract] + # TODO: Clickhouse driver only accepts pyformat `...WHERE name=%(name)s` parameter marker arguments. + cur.execute(query, args or (kwargs or None)) + yield ClickhouseDBApiCursorImpl(cur) # type: ignore[abstract] except clickhouse_driver.dbapi.Error: self.close_connection() self.open_connection() From 16df85116af3a10089c641d27c5266dd42fc9cf7 Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Sat, 16 Mar 2024 22:59:38 +0200 Subject: [PATCH 014/127] Finish ClickhouseClient Signed-off-by: Marcel Coetzee --- dlt/common/data_writers/escape.py | 2 +- dlt/common/destination/reference.py | 4 +++- dlt/destinations/impl/clickhouse/clickhouse_adapter.py | 1 + dlt/destinations/impl/clickhouse/factory.py | 7 +------ dlt/destinations/impl/clickhouse/sql_client.py | 4 +++- 5 files changed, 9 insertions(+), 9 deletions(-) diff --git a/dlt/common/data_writers/escape.py b/dlt/common/data_writers/escape.py index a606906774..3535ad4ef9 100644 --- a/dlt/common/data_writers/escape.py +++ b/dlt/common/data_writers/escape.py @@ -138,4 +138,4 @@ def escape_databricks_literal(v: Any) -> Any: def escape_clickhouse_identifier(v: str) -> str: - return '`' + v.replace('`', '``').replace("\\", "\\\\") + '"' + return "`" + v.replace("`", "``").replace("\\", "\\\\") + '"' diff --git a/dlt/common/destination/reference.py b/dlt/common/destination/reference.py index b6f06024cc..d89047a946 100644 --- a/dlt/common/destination/reference.py +++ b/dlt/common/destination/reference.py @@ -131,7 +131,9 @@ def normalize_dataset_name(self, schema: Schema) -> str: ) return ( - schema.naming.normalize_table_identifier(self.dataset_name) if self.dataset_name else self.dataset_name + schema.naming.normalize_table_identifier(self.dataset_name) + if self.dataset_name + else self.dataset_name ) if TYPE_CHECKING: diff --git a/dlt/destinations/impl/clickhouse/clickhouse_adapter.py b/dlt/destinations/impl/clickhouse/clickhouse_adapter.py index a2e8d39c03..d219c288ae 100644 --- a/dlt/destinations/impl/clickhouse/clickhouse_adapter.py +++ b/dlt/destinations/impl/clickhouse/clickhouse_adapter.py @@ -22,6 +22,7 @@ TABLE_ENGINE_TYPES: Set[TTableEngineType] = set(get_args(TTableEngineType)) TABLE_ENGINE_TYPE_HINT: Literal["x-table-engine-type"] = "x-table-engine-type" + def clickhouse_adapter(data: Any, table_engine_type: TTableEngineType = None) -> DltResource: """Prepares data for the Clickhouse destination by specifying which table engine type that should be used. 
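For context, a minimal sketch of how this adapter is intended to be used once the `clickhouse` factory is registered with dlt; the pipeline name and sample data below are illustrative assumptions, not part of the patch:

import dlt
from dlt.destinations.impl.clickhouse.clickhouse_adapter import clickhouse_adapter

data = [{"name": "Alice", "description": "Software Developer"}]

# Wraps the raw data in a DltResource and attaches the "x-table-engine-type" hint,
# which the table builder consults when emitting the ENGINE clause.
resource = clickhouse_adapter(data, table_engine_type="merge_tree")

pipeline = dlt.pipeline(pipeline_name="adapter_demo", destination="clickhouse")
pipeline.run(resource)

Resources without the hint fall back to "replicated_merge_tree", which the client treats as the default since it is the only engine supported by ClickHouse Cloud.
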
diff --git a/dlt/destinations/impl/clickhouse/factory.py b/dlt/destinations/impl/clickhouse/factory.py index 6f59557d71..2f619c04b1 100644 --- a/dlt/destinations/impl/clickhouse/factory.py +++ b/dlt/destinations/impl/clickhouse/factory.py @@ -1,6 +1,6 @@ import typing as t -from clickhouse_driver.dbapi import Connection +from clickhouse_driver.dbapi import Connection # type: ignore[import-untyped] from dlt.common.destination import Destination, DestinationCapabilitiesContext from dlt.destinations.impl.clickhouse import capabilities @@ -55,8 +55,3 @@ def __init__( create_indexes=create_indexes, **kwargs, ) - - - - - diff --git a/dlt/destinations/impl/clickhouse/sql_client.py b/dlt/destinations/impl/clickhouse/sql_client.py index 1323dc2da9..87b5651668 100644 --- a/dlt/destinations/impl/clickhouse/sql_client.py +++ b/dlt/destinations/impl/clickhouse/sql_client.py @@ -93,7 +93,9 @@ def execute_sql( @contextmanager @raise_database_error - def execute_query(self, query: AnyStr, *args: Any, **kwargs: Any) -> Iterator[ClickhouseDBApiCursorImpl]: + def execute_query( + self, query: AnyStr, *args: Any, **kwargs: Any + ) -> Iterator[ClickhouseDBApiCursorImpl]: cur: clickhouse_driver.dbapi.connection.Cursor with self._conn.cursor() as cur: try: From c134cadb0ff1af9fd8ac90d93f9985bcb4896114 Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Thu, 14 Mar 2024 19:26:36 +0200 Subject: [PATCH 015/127] Add escape_clickhouse_literal function for Clickhouse Signed-off-by: Marcel Coetzee --- dlt/common/data_writers/escape.py | 36 ++++++++++++++++++++ dlt/destinations/impl/clickhouse/__init__.py | 3 +- 2 files changed, 38 insertions(+), 1 deletion(-) diff --git a/dlt/common/data_writers/escape.py b/dlt/common/data_writers/escape.py index 3535ad4ef9..027e7b1554 100644 --- a/dlt/common/data_writers/escape.py +++ b/dlt/common/data_writers/escape.py @@ -137,5 +137,41 @@ def escape_databricks_literal(v: Any) -> Any: return "NULL" if v is None else str(v) +# https://github.com/ClickHouse/ClickHouse/blob/master/docs/en/sql-reference/syntax.md#string +CLICKHOUSE_ESCAPE_DICT = { + "'": "''", + "\\": "\\\\", + "\n": "\\n", + "\t": "\\t", + "\b": "\\b", + "\f": "\\f", + "\r": "\\r", + "\0": "\\0", + "\a": "\\a", + "\v": "\\v", +} + +CLICKHOUSE_ESCAPE_RE = _make_sql_escape_re(CLICKHOUSE_ESCAPE_DICT) + + +def escape_clickhouse_literal(v: Any) -> Any: + if isinstance(v, str): + return _escape_extended( + v, prefix="'", escape_dict=CLICKHOUSE_ESCAPE_DICT, escape_re=CLICKHOUSE_ESCAPE_RE + ) + if isinstance(v, (datetime, date, time)): + return f"'{v.isoformat()}'" + if isinstance(v, (list, dict)): + return _escape_extended( + json.dumps(v), + prefix="'", + escape_dict=CLICKHOUSE_ESCAPE_DICT, + escape_re=CLICKHOUSE_ESCAPE_RE, + ) + if isinstance(v, bytes): + return f"'{v.hex()}'" + return "NULL" if v is None else str(v) + + def escape_clickhouse_identifier(v: str) -> str: return "`" + v.replace("`", "``").replace("\\", "\\\\") + '"' diff --git a/dlt/destinations/impl/clickhouse/__init__.py b/dlt/destinations/impl/clickhouse/__init__.py index 236e83ef54..88ea37d014 100644 --- a/dlt/destinations/impl/clickhouse/__init__.py +++ b/dlt/destinations/impl/clickhouse/__init__.py @@ -1,5 +1,5 @@ from dlt.common.arithmetics import DEFAULT_NUMERIC_PRECISION, DEFAULT_NUMERIC_SCALE -from dlt.common.data_writers.escape import escape_clickhouse_identifier +from dlt.common.data_writers.escape import escape_clickhouse_identifier, escape_clickhouse_literal from dlt.common.destination import DestinationCapabilitiesContext @@ -12,6 +12,7 @@ def 
capabilities() -> DestinationCapabilitiesContext: caps.supported_staging_file_formats = ["jsonl", "parquet", "arrow"] caps.escape_identifier = escape_clickhouse_identifier + caps.escape_literal = escape_clickhouse_literal caps.schema_supports_numeric_precision = True # Use 'Decimal128' with these defaults. From 2fa644c2e2b8ffa09cfeab17a90a172670b80e5c Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Sun, 17 Mar 2024 13:01:28 +0200 Subject: [PATCH 016/127] Add "insert_values" to supported loader file formats Signed-off-by: Marcel Coetzee --- dlt/destinations/impl/clickhouse/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dlt/destinations/impl/clickhouse/__init__.py b/dlt/destinations/impl/clickhouse/__init__.py index 88ea37d014..8613a29cb2 100644 --- a/dlt/destinations/impl/clickhouse/__init__.py +++ b/dlt/destinations/impl/clickhouse/__init__.py @@ -7,7 +7,7 @@ def capabilities() -> DestinationCapabilitiesContext: caps = DestinationCapabilitiesContext() caps.preferred_loader_file_format = "jsonl" - caps.supported_loader_file_formats = ["jsonl", "parquet", "arrow"] + caps.supported_loader_file_formats = ["jsonl", "parquet", "arrow", "insert_values"] caps.preferred_staging_file_format = "jsonl" caps.supported_staging_file_formats = ["jsonl", "parquet", "arrow"] From 0af45b98439fbf0e7e2afe4de49ec33bffa58553 Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Sun, 17 Mar 2024 19:34:16 +0200 Subject: [PATCH 017/127] Add `wei` to "from_db_type" Signed-off-by: Marcel Coetzee --- dlt/destinations/impl/clickhouse/clickhouse.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/dlt/destinations/impl/clickhouse/clickhouse.py b/dlt/destinations/impl/clickhouse/clickhouse.py index b5d9624b97..ee021bfe46 100644 --- a/dlt/destinations/impl/clickhouse/clickhouse.py +++ b/dlt/destinations/impl/clickhouse/clickhouse.py @@ -2,7 +2,7 @@ from typing import ClassVar, Optional, Dict, List, Sequence from dlt.common.destination import DestinationCapabilitiesContext -from dlt.common.destination.reference import SupportsStagingDestination +from dlt.common.destination.reference import SupportsStagingDestination, TLoadJobState from dlt.common.schema import Schema, TColumnSchema from dlt.common.schema.typing import TTableFormat, TTableSchema, TColumnHint, TColumnType from dlt.destinations.impl.clickhouse import capabilities @@ -67,6 +67,13 @@ class ClickhouseTypeMapper(TypeMapper): def to_db_time_type(self, precision: Optional[int], table_format: TTableFormat = None) -> str: return "DateTime" + def from_db_type( + self, db_type: str, precision: Optional[int] = None, scale: Optional[int] = None + ) -> TColumnType: + if db_type == "Decimal" and (precision, scale) == self.capabilities.wei_precision: + return dict(data_type="wei") + return super().from_db_type(db_type, precision, scale) + class ClickhouseCopyFileLoadJob(CopyRemoteFileLoadJob): def __init__( @@ -84,7 +91,9 @@ def exception(self) -> str: pass -class ClickhouseMergeJob(SqlMergeJob): ... 
+class ClickhouseMergeJob(SqlMergeJob): + def __init__(self, file_name: str, status: TLoadJobState): + super().__init__(file_name, status) class ClickhouseClient(SqlJobClientWithStaging, SupportsStagingDestination): From de66efa91488071a07817dcb0c66b5477990126a Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Sun, 17 Mar 2024 23:33:37 +0200 Subject: [PATCH 018/127] Improve Clickhouse loader code and update comments #1055 Signed-off-by: Marcel Coetzee --- dlt/common/destination/reference.py | 24 +-- dlt/destinations/impl/clickhouse/__init__.py | 2 +- .../impl/clickhouse/clickhouse.py | 137 +++++++++++++++--- .../impl/clickhouse/configuration.py | 6 +- dlt/destinations/job_impl.py | 4 +- 5 files changed, 136 insertions(+), 37 deletions(-) diff --git a/dlt/common/destination/reference.py b/dlt/common/destination/reference.py index d89047a946..d564da567a 100644 --- a/dlt/common/destination/reference.py +++ b/dlt/common/destination/reference.py @@ -201,22 +201,22 @@ def __init__( class LoadJob: - """Represents a job that loads a single file + """Represents a job that loads a single file. - Each job starts in "running" state and ends in one of terminal states: "retry", "failed" or "completed". - Each job is uniquely identified by a file name. The file is guaranteed to exist in "running" state. In terminal state, the file may not be present. - In "running" state, the loader component periodically gets the state via `status()` method. When terminal state is reached, load job is discarded and not called again. + Each job starts in "running" state and ends in one of the terminal states: "retry", "failed" or "completed". + A filename uniquely identifies each job. The file is guaranteed to exist in "running" state. In terminal state, the file may not be present. + In "running" state, the loader component periodically gets the state via `status()` method. When terminal state is reached, a load job is discarded and not called again. `exception` method is called to get error information in "failed" and "retry" states. The `__init__` method is responsible to put the Job in "running" state. It may raise `LoadClientTerminalException` and `LoadClientTransientException` to - immediately transition job into "failed" or "retry" state respectively. + immediately transition a job into "failed" or "retry" state respectively. """ def __init__(self, file_name: str) -> None: """ - File name is also a job id (or job id is deterministically derived) so it must be globally unique + Filename is a job ID (or job ID is deterministically derived), so it must be globally unique. """ - # ensure file name + # Ensure filename. 
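+        # (It must equal its own base name, i.e. it may not contain a directory part.)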
assert file_name == FileStorage.get_file_name_from_file_path(file_name) self._file_name = file_name self._parsed_file_name = ParsedLoadJobFileName.parse(file_name) @@ -231,7 +231,7 @@ def file_name(self) -> str: return self._file_name def job_id(self) -> str: - """The job id that is derived from the file name and does not changes during job lifecycle""" + """The job ID that is derived from the filename and does not change during job lifecycle.""" return self._parsed_file_name.job_id() def job_file_info(self) -> ParsedLoadJobFileName: @@ -239,7 +239,7 @@ def job_file_info(self) -> ParsedLoadJobFileName: @abstractmethod def exception(self) -> str: - """The exception associated with failed or retry states""" + """The exception associated with failed or retry states.""" pass @@ -248,15 +248,15 @@ class NewLoadJob(LoadJob): @abstractmethod def new_file_path(self) -> str: - """Path to a newly created temporary job file. If empty, no followup job should be created""" + """Path to a newly created temporary job file. If empty, no followup job should be created.""" pass class FollowupJob: - """Adds a trait that allows to create a followup job""" + """Adds a trait that allows to create a followup job.""" def create_followup_jobs(self, final_state: TLoadJobState) -> List[NewLoadJob]: - """Return list of new jobs. `final_state` is state to which this job transits""" + """Return list of new jobs. `final_state` is state to which this job transits.""" return [] diff --git a/dlt/destinations/impl/clickhouse/__init__.py b/dlt/destinations/impl/clickhouse/__init__.py index 8613a29cb2..88ea37d014 100644 --- a/dlt/destinations/impl/clickhouse/__init__.py +++ b/dlt/destinations/impl/clickhouse/__init__.py @@ -7,7 +7,7 @@ def capabilities() -> DestinationCapabilitiesContext: caps = DestinationCapabilitiesContext() caps.preferred_loader_file_format = "jsonl" - caps.supported_loader_file_formats = ["jsonl", "parquet", "arrow", "insert_values"] + caps.supported_loader_file_formats = ["jsonl", "parquet", "arrow"] caps.preferred_staging_file_format = "jsonl" caps.supported_staging_file_formats = ["jsonl", "parquet", "arrow"] diff --git a/dlt/destinations/impl/clickhouse/clickhouse.py b/dlt/destinations/impl/clickhouse/clickhouse.py index ee021bfe46..a4874e4e87 100644 --- a/dlt/destinations/impl/clickhouse/clickhouse.py +++ b/dlt/destinations/impl/clickhouse/clickhouse.py @@ -1,10 +1,23 @@ from copy import deepcopy from typing import ClassVar, Optional, Dict, List, Sequence +from urllib.parse import urlparse, urlunparse +from dlt.common.configuration.specs import ( + CredentialsConfiguration, + AwsCredentialsWithoutDefaults, + AzureCredentialsWithoutDefaults, +) from dlt.common.destination import DestinationCapabilitiesContext -from dlt.common.destination.reference import SupportsStagingDestination, TLoadJobState +from dlt.common.destination.reference import ( + SupportsStagingDestination, + TLoadJobState, + FollowupJob, + LoadJob, +) from dlt.common.schema import Schema, TColumnSchema from dlt.common.schema.typing import TTableFormat, TTableSchema, TColumnHint, TColumnType +from dlt.common.storages import FileStorage +from dlt.destinations.exceptions import LoadJobTerminalException from dlt.destinations.impl.clickhouse import capabilities from dlt.destinations.impl.clickhouse.clickhouse_adapter import ( TTableEngineType, @@ -12,15 +25,13 @@ ) from dlt.destinations.impl.clickhouse.configuration import ( ClickhouseClientConfiguration, - ClickhouseCredentials, ) from dlt.destinations.impl.clickhouse.sql_client import 
ClickhouseSqlClient from dlt.destinations.job_client_impl import ( SqlJobClientWithStaging, - CopyRemoteFileLoadJob, SqlJobClientBase, ) -from dlt.destinations.sql_jobs import SqlMergeJob +from dlt.destinations.job_impl import NewReferenceJob, EmptyLoadJob from dlt.destinations.type_mapping import TypeMapper @@ -75,25 +86,97 @@ def from_db_type( return super().from_db_type(db_type, precision, scale) -class ClickhouseCopyFileLoadJob(CopyRemoteFileLoadJob): +class ClickhouseLoadJob(LoadJob, FollowupJob): def __init__( self, - table: TTableSchema, file_path: str, - sql_client: ClickhouseSqlClient, - staging_credentials: Optional[ClickhouseCredentials] = None, - staging_iam_role: str = None, + table_name: str, + load_id: str, + client: ClickhouseSqlClient, + staging_credentials: Optional[CredentialsConfiguration] = None, ) -> None: - self._staging_iam_role = staging_iam_role - super().__init__(table, file_path, sql_client, staging_credentials) + file_name = FileStorage.get_file_name_from_file_path(file_path) + super().__init__(file_name) - def exception(self) -> str: - pass + qualified_table_name = client.make_qualified_table_name(table_name) + + bucket_path: str = ( + NewReferenceJob.resolve_reference(file_path) + if NewReferenceJob.is_reference_job(file_path) + else "" + ) + file_name = ( + FileStorage.get_file_name_from_file_path(bucket_path) if bucket_path else file_name + ) + credentials_clause = "" + files_clause = "" + + if bucket_path: + bucket_url = urlparse(bucket_path) + bucket_scheme = bucket_url.scheme + # Referencing an external s3/azure stage does not require explicit AWS credentials. + if ( + bucket_scheme == "s3" + and staging_credentials + and isinstance(staging_credentials, AwsCredentialsWithoutDefaults) + ): + credentials_clause = f"""CREDENTIALS=(AWS_KEY_ID='{staging_credentials.aws_access_key_id}' AWS_SECRET_KEY='{staging_credentials.aws_secret_access_key}')""" + from_clause = f"FROM '{bucket_path}'" + elif ( + bucket_scheme in ["az", "abfs"] + and staging_credentials + and isinstance(staging_credentials, AzureCredentialsWithoutDefaults) + ): + # Explicit azure credentials are needed to load from bucket without a named stage + credentials_clause = f"CREDENTIALS=(AZURE_SAS_TOKEN='?{staging_credentials.azure_storage_sas_token}')" + # Converts an az:/// to azure://.blob.core.windows.net// as required by Clickhouse. + _path = f"/{bucket_url.netloc}{bucket_url.path}" + bucket_path = urlunparse( + bucket_url._replace( + scheme="azure", + netloc=f"{staging_credentials.azure_storage_account_name}.blob.core.windows.net", + path=_path, + ) + ) + from_clause = f"FROM '{bucket_path}'" + else: + # Ensure that gcs bucket path starts with gcs://; this is a requirement of Clickhouse. + bucket_path = bucket_path.replace("gs://", "gcs://") + from_clause = f"FROM @{stage_name}/" + files_clause = f"FILES = ('{urlparse(bucket_path).path.lstrip('/')}')" + else: + # This means we have a local file. + if not stage_name: + # Use implicit table stage by default: "SCHEMA_NAME"."%TABLE_NAME". + stage_name = client.make_qualified_table_name(f"%{table_name}") + stage_file_path = f'@{stage_name}/"{load_id}"/{file_name}' + from_clause = f"FROM {stage_file_path}" + # Decide on source format, stage_file_path will either be a local file or a bucket path. 
+ source_format = "( TYPE = 'JSON', BINARY_FORMAT = 'BASE64' )" + if file_name.endswith("parquet"): + source_format = "(TYPE = 'PARQUET', BINARY_AS_TEXT = FALSE)" -class ClickhouseMergeJob(SqlMergeJob): - def __init__(self, file_name: str, status: TLoadJobState): - super().__init__(file_name, status) + with client.begin_transaction(): + # PUT and COPY in one transaction if local file, otherwise only copy. + if not bucket_path: + client.execute_sql( + f'PUT file://{file_path} @{stage_name}/"{load_id}" OVERWRITE = TRUE,' + " AUTO_COMPRESS = FALSE" + ) + client.execute_sql(f"""COPY INTO {qualified_table_name} + {from_clause} + {files_clause} + {credentials_clause} + FILE_FORMAT = {source_format} + MATCH_BY_COLUMN_NAME='CASE_INSENSITIVE' + """) + + def state(self) -> TLoadJobState: + return "completed" + + def exception(self) -> str: + raise NotImplementedError() class ClickhouseClient(SqlJobClientWithStaging, SupportsStagingDestination): @@ -104,15 +187,26 @@ def __init__( schema: Schema, config: ClickhouseClientConfiguration, ) -> None: - self.config: ClickhouseClientConfiguration = config # TODO: There are no schemas in Clickhouse. No point in having schemas, only dataset names and table names for example "dataset1_mytable". - self.sql_client = ClickhouseSqlClient( - self.config.normalize_dataset_name(self.schema), self.config.credentials + self.sql_client: ClickhouseSqlClient = ClickhouseSqlClient( + config.normalize_dataset_name(schema), config.credentials ) - super().__init__(schema, self.config, self.sql_client) + super().__init__(schema, config, self.sql_client) + self.config: ClickhouseClientConfiguration = config self.active_hints = deepcopy(HINT_TO_CLICKHOUSE_ATTR) if self.config.create_indexes else {} self.type_mapper = ClickhouseTypeMapper(self.capabilities) + def start_file_load(self, table: TTableSchema, file_path: str, load_id: str) -> LoadJob: + return super().start_file_load(table, file_path, load_id) or ClickhouseLoadJob( + file_path, + table["name"], + load_id, + self.sql_client, + staging_credentials=( + self.config.staging_config.credentials if self.config.staging_config else None + ), + ) + def _get_table_update_sql( self, table_name: str, new_columns: Sequence[TColumnSchema], generate_alter: bool ) -> List[str]: @@ -161,3 +255,6 @@ def _from_db_type( self, ch_t: str, precision: Optional[int], scale: Optional[int] ) -> TColumnType: return self.type_mapper.from_db_type(ch_t, precision, scale) + + def restore_file_load(self, file_path: str) -> LoadJob: + return EmptyLoadJob.from_file_path(file_path, "completed") diff --git a/dlt/destinations/impl/clickhouse/configuration.py b/dlt/destinations/impl/clickhouse/configuration.py index 359f08e47e..e8d01ba4b0 100644 --- a/dlt/destinations/impl/clickhouse/configuration.py +++ b/dlt/destinations/impl/clickhouse/configuration.py @@ -1,4 +1,4 @@ -from typing import ClassVar, List, Any, Final, TYPE_CHECKING +from typing import ClassVar, List, Any, Final, TYPE_CHECKING, Optional from dlt.common.configuration import configspec from dlt.common.configuration.specs import ConnectionStringCredentials @@ -63,7 +63,7 @@ class ClickhouseClientConfiguration(DestinationClientDwhWithStagingConfiguration # but they do not enforce uniqueness constraints. It permits duplicate values even for the primary key # columns within the same granule. # See: https://clickhouse.com/docs/en/optimize/sparse-primary-indexes - create_indexes: bool = False + create_indexes: bool = True """Whether `primary_key` column hint is applied. 
Note that Clickhouse has no unique constraint, and primary keys don't guarantee uniqueness.""" @@ -83,6 +83,8 @@ def __init__( credentials: ClickhouseCredentials = None, dataset_name: str = None, create_indexes: bool = False, + stage_name: str = None, + keep_staged_files: bool = True, destination_name: str = None, environment: str = None ) -> None: diff --git a/dlt/destinations/job_impl.py b/dlt/destinations/job_impl.py index 7a6b98544c..057f5d606d 100644 --- a/dlt/destinations/job_impl.py +++ b/dlt/destinations/job_impl.py @@ -46,7 +46,7 @@ class NewReferenceJob(NewLoadJobImpl): def __init__( self, file_name: str, status: TLoadJobState, exception: str = None, remote_path: str = None ) -> None: - file_name = os.path.splitext(file_name)[0] + ".reference" + file_name = f"{os.path.splitext(file_name)[0]}.reference" super().__init__(file_name, status, exception) self._remote_path = remote_path self._save_text_file(remote_path) @@ -58,5 +58,5 @@ def is_reference_job(file_path: str) -> bool: @staticmethod def resolve_reference(file_path: str) -> str: with open(file_path, "r+", encoding="utf-8") as f: - # Reading from a file + # Reading from a file. return f.read() From 7e0d508b7ba74e91ef9b7b31d3958151e510b224 Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Mon, 18 Mar 2024 15:33:37 +0200 Subject: [PATCH 019/127] Preliminary CH tests and utility module Signed-off-by: Marcel Coetzee --- dlt/destinations/__init__.py | 22 +-- .../impl/clickhouse/clickhouse.py | 6 +- dlt/destinations/impl/clickhouse/factory.py | 5 +- dlt/destinations/impl/clickhouse/utils.py | 30 ++++ tests/load/clickhouse/__init__.py | 3 + .../test_clickhouse_configuration.py | 137 ++++++++++++++++++ .../test_clickhouse_table_builder.py | 93 ++++++++++++ tests/load/clickhouse/test_utls.py | 95 ++++++++++++ tests/utils.py | 1 + 9 files changed, 375 insertions(+), 17 deletions(-) create mode 100644 dlt/destinations/impl/clickhouse/utils.py create mode 100644 tests/load/clickhouse/__init__.py create mode 100644 tests/load/clickhouse/test_clickhouse_configuration.py create mode 100644 tests/load/clickhouse/test_clickhouse_table_builder.py create mode 100644 tests/load/clickhouse/test_utls.py diff --git a/dlt/destinations/__init__.py b/dlt/destinations/__init__.py index 4a10deffc0..98b247e81e 100644 --- a/dlt/destinations/__init__.py +++ b/dlt/destinations/__init__.py @@ -1,18 +1,19 @@ -from dlt.destinations.impl.postgres.factory import postgres -from dlt.destinations.impl.snowflake.factory import snowflake -from dlt.destinations.impl.filesystem.factory import filesystem +from dlt.destinations.impl.athena.factory import athena +from dlt.destinations.impl.bigquery.factory import bigquery +from dlt.destinations.impl.clickhouse.factory import clickhouse +from dlt.destinations.impl.databricks.factory import databricks +from dlt.destinations.impl.destination.factory import destination from dlt.destinations.impl.duckdb.factory import duckdb from dlt.destinations.impl.dummy.factory import dummy +from dlt.destinations.impl.filesystem.factory import filesystem +from dlt.destinations.impl.motherduck.factory import motherduck from dlt.destinations.impl.mssql.factory import mssql -from dlt.destinations.impl.bigquery.factory import bigquery -from dlt.destinations.impl.athena.factory import athena -from dlt.destinations.impl.redshift.factory import redshift +from dlt.destinations.impl.postgres.factory import postgres from dlt.destinations.impl.qdrant.factory import qdrant -from dlt.destinations.impl.motherduck.factory import motherduck -from 
dlt.destinations.impl.weaviate.factory import weaviate -from dlt.destinations.impl.destination.factory import destination +from dlt.destinations.impl.redshift.factory import redshift +from dlt.destinations.impl.snowflake.factory import snowflake from dlt.destinations.impl.synapse.factory import synapse -from dlt.destinations.impl.databricks.factory import databricks +from dlt.destinations.impl.weaviate.factory import weaviate __all__ = [ @@ -31,4 +32,5 @@ "synapse", "databricks", "destination", + "clickhouse", ] diff --git a/dlt/destinations/impl/clickhouse/clickhouse.py b/dlt/destinations/impl/clickhouse/clickhouse.py index a4874e4e87..48e7007a3c 100644 --- a/dlt/destinations/impl/clickhouse/clickhouse.py +++ b/dlt/destinations/impl/clickhouse/clickhouse.py @@ -17,7 +17,6 @@ from dlt.common.schema import Schema, TColumnSchema from dlt.common.schema.typing import TTableFormat, TTableSchema, TColumnHint, TColumnType from dlt.common.storages import FileStorage -from dlt.destinations.exceptions import LoadJobTerminalException from dlt.destinations.impl.clickhouse import capabilities from dlt.destinations.impl.clickhouse.clickhouse_adapter import ( TTableEngineType, @@ -111,10 +110,11 @@ def __init__( credentials_clause = "" files_clause = "" + if bucket_path: bucket_url = urlparse(bucket_path) bucket_scheme = bucket_url.scheme - # Referencing an external s3/azure stage does not require explicit AWS credentials. + # TODO: convert all object storage endpoints to http protocol. if ( bucket_scheme == "s3" and staging_credentials @@ -123,7 +123,7 @@ def __init__( credentials_clause = f"""CREDENTIALS=(AWS_KEY_ID='{staging_credentials.aws_access_key_id}' AWS_SECRET_KEY='{staging_credentials.aws_secret_access_key}')""" from_clause = f"FROM '{bucket_path}'" elif ( - bucket_scheme in ["az", "abfs"] + bucket_scheme in ("az", "abfs") and staging_credentials and isinstance(staging_credentials, AzureCredentialsWithoutDefaults) ): diff --git a/dlt/destinations/impl/clickhouse/factory.py b/dlt/destinations/impl/clickhouse/factory.py index 2f619c04b1..90065c6582 100644 --- a/dlt/destinations/impl/clickhouse/factory.py +++ b/dlt/destinations/impl/clickhouse/factory.py @@ -4,16 +4,13 @@ from dlt.common.destination import Destination, DestinationCapabilitiesContext from dlt.destinations.impl.clickhouse import capabilities +from dlt.destinations.impl.clickhouse.clickhouse import ClickhouseClient from dlt.destinations.impl.clickhouse.configuration import ( ClickhouseClientConfiguration, ClickhouseCredentials, ) -if t.TYPE_CHECKING: - from dlt.destinations.impl.clickhouse.clickhouse import ClickhouseClient - - # noinspection PyPep8Naming class clickhouse(Destination[ClickhouseClientConfiguration, ClickhouseClient]): spec = ClickhouseClientConfiguration diff --git a/dlt/destinations/impl/clickhouse/utils.py b/dlt/destinations/impl/clickhouse/utils.py new file mode 100644 index 0000000000..56ef29599b --- /dev/null +++ b/dlt/destinations/impl/clickhouse/utils.py @@ -0,0 +1,30 @@ +from urllib.parse import urlparse + + +def convert_storage_url_to_http( + url: str, use_https: bool = False, endpoint: str = None, region: str = None +) -> str: + try: + parsed_url = urlparse(url) + + bucket_name = parsed_url.netloc + object_key = parsed_url.path.lstrip("/") + protocol = "https" if use_https else "http" + + if endpoint: + domain = endpoint + elif region and parsed_url.scheme == "s3": + domain = f"s3-{region}.amazonaws.com" + else: + storage_domains = { + "s3": "s3.amazonaws.com", + "gs": "storage.googleapis.com", + "gcs": 
"storage.googleapis.com", + } + + domain = storage_domains[parsed_url.scheme] + + return f"{protocol}://{bucket_name}.{domain}/{object_key}" + + except Exception as e: + raise Exception(f"Error converting storage URL to HTTP protocol: '{url}'") from e diff --git a/tests/load/clickhouse/__init__.py b/tests/load/clickhouse/__init__.py new file mode 100644 index 0000000000..440cb317fd --- /dev/null +++ b/tests/load/clickhouse/__init__.py @@ -0,0 +1,3 @@ +from tests.utils import skip_if_not_active + +skip_if_not_active("clickhouse") diff --git a/tests/load/clickhouse/test_clickhouse_configuration.py b/tests/load/clickhouse/test_clickhouse_configuration.py new file mode 100644 index 0000000000..d9d7eb3c27 --- /dev/null +++ b/tests/load/clickhouse/test_clickhouse_configuration.py @@ -0,0 +1,137 @@ +import os +import pytest +from pathlib import Path +from dlt.common.libs.sql_alchemy import make_url + +pytest.importorskip("clickhouse") + +from dlt.common.configuration.resolve import resolve_configuration +from dlt.common.configuration.exceptions import ConfigurationValueError +from dlt.common.utils import digest128 + +from dlt.destinations.impl.snowflake.configuration import ( + SnowflakeClientConfiguration, + SnowflakeCredentials, +) + +from tests.common.configuration.utils import environment + + +@ +def test_connection_string_with_all_params(g -> None: + url = "snowflake://user1:pass1@host1/db1?warehouse=warehouse1&role=role1&private_key=cGs%3D&private_key_passphrase=paphr" + + creds = SnowflakeCredentials() + creds.parse_native_representation(url) + + assert creds.database == "db1" + assert creds.username == "user1" + assert creds.password == "pass1" + assert creds.host == "host1" + assert creds.warehouse == "warehouse1" + assert creds.role == "role1" + assert creds.private_key == "cGs=" + assert creds.private_key_passphrase == "paphr" + + expected = make_url(url) + + # Test URL components regardless of query param order + assert make_url(creds.to_native_representation()) == expected + + +def test_to_connector_params() -> None: + # PEM key + pkey_str = Path("./tests/common/cases/secrets/encrypted-private-key").read_text("utf8") + + creds = SnowflakeCredentials() + creds.private_key = pkey_str # type: ignore[assignment] + creds.private_key_passphrase = "12345" # type: ignore[assignment] + creds.username = "user1" + creds.database = "db1" + creds.host = "host1" + creds.warehouse = "warehouse1" + creds.role = "role1" + + params = creds.to_connector_params() + + assert isinstance(params["private_key"], bytes) + params.pop("private_key") + + assert params == dict( + user="user1", + database="db1", + account="host1", + password=None, + warehouse="warehouse1", + role="role1", + ) + + # base64 encoded DER key + pkey_str = Path("./tests/common/cases/secrets/encrypted-private-key-base64").read_text("utf8") + + creds = SnowflakeCredentials() + creds.private_key = pkey_str # type: ignore[assignment] + creds.private_key_passphrase = "12345" # type: ignore[assignment] + creds.username = "user1" + creds.database = "db1" + creds.host = "host1" + creds.warehouse = "warehouse1" + creds.role = "role1" + + params = creds.to_connector_params() + + assert isinstance(params["private_key"], bytes) + params.pop("private_key") + + assert params == dict( + user="user1", + database="db1", + account="host1", + password=None, + warehouse="warehouse1", + role="role1", + ) + + +def test_snowflake_credentials_native_value(environment) -> None: + with pytest.raises(ConfigurationValueError): + resolve_configuration( + 
SnowflakeCredentials(), + explicit_value="snowflake://user1@host1/db1?warehouse=warehouse1&role=role1", + ) + # set password via env + os.environ["CREDENTIALS__PASSWORD"] = "pass" + c = resolve_configuration( + SnowflakeCredentials(), + explicit_value="snowflake://user1@host1/db1?warehouse=warehouse1&role=role1", + ) + assert c.is_resolved() + assert c.password == "pass" + # # but if password is specified - it is final + c = resolve_configuration( + SnowflakeCredentials(), + explicit_value="snowflake://user1:pass1@host1/db1?warehouse=warehouse1&role=role1", + ) + assert c.is_resolved() + assert c.password == "pass1" + + # set PK via env + del os.environ["CREDENTIALS__PASSWORD"] + os.environ["CREDENTIALS__PRIVATE_KEY"] = "pk" + c = resolve_configuration( + SnowflakeCredentials(), + explicit_value="snowflake://user1@host1/db1?warehouse=warehouse1&role=role1", + ) + assert c.is_resolved() + assert c.private_key == "pk" + + +def test_snowflake_configuration() -> None: + # def empty fingerprint + assert SnowflakeClientConfiguration().fingerprint() == "" + # based on host + c = resolve_configuration( + SnowflakeCredentials(), + explicit_value="snowflake://user1:pass@host1/db1?warehouse=warehouse1&role=role1", + ) + assert SnowflakeClientConfiguration(credentials=c).fingerprint() == digest128("host1") diff --git a/tests/load/clickhouse/test_clickhouse_table_builder.py b/tests/load/clickhouse/test_clickhouse_table_builder.py new file mode 100644 index 0000000000..1e80a61f1c --- /dev/null +++ b/tests/load/clickhouse/test_clickhouse_table_builder.py @@ -0,0 +1,93 @@ +from copy import deepcopy + +import pytest +import sqlfluff + +from dlt.common.utils import uniq_id +from dlt.common.schema import Schema +from dlt.destinations.impl.snowflake.snowflake import SnowflakeClient +from dlt.destinations.impl.snowflake.configuration import ( + SnowflakeClientConfiguration, + SnowflakeCredentials, +) +from dlt.destinations.exceptions import DestinationSchemaWillNotUpdate + +from tests.load.utils import TABLE_UPDATE, empty_schema + + +@pytest.fixture +def snowflake_client(empty_schema: Schema) -> SnowflakeClient: + # return client without opening connection + creds = SnowflakeCredentials() + return SnowflakeClient( + empty_schema, + SnowflakeClientConfiguration(dataset_name="test_" + uniq_id(), credentials=creds), + ) + + +def test_create_table(snowflake_client: SnowflakeClient) -> None: + statements = snowflake_client._get_table_update_sql("event_test_table", TABLE_UPDATE, False) + assert len(statements) == 1 + sql = statements[0] + sqlfluff.parse(sql, dialect="snowflake") + + assert sql.strip().startswith("CREATE TABLE") + assert "EVENT_TEST_TABLE" in sql + assert '"COL1" NUMBER(19,0) NOT NULL' in sql + assert '"COL2" FLOAT NOT NULL' in sql + assert '"COL3" BOOLEAN NOT NULL' in sql + assert '"COL4" TIMESTAMP_TZ NOT NULL' in sql + assert '"COL5" VARCHAR' in sql + assert '"COL6" NUMBER(38,9) NOT NULL' in sql + assert '"COL7" BINARY' in sql + assert '"COL8" NUMBER(38,0)' in sql + assert '"COL9" VARIANT NOT NULL' in sql + assert '"COL10" DATE NOT NULL' in sql + + +def test_alter_table(snowflake_client: SnowflakeClient) -> None: + statements = snowflake_client._get_table_update_sql("event_test_table", TABLE_UPDATE, True) + assert len(statements) == 1 + sql = statements[0] + + # TODO: sqlfluff doesn't parse snowflake multi ADD COLUMN clause correctly + # sqlfluff.parse(sql, dialect='snowflake') + + assert sql.startswith("ALTER TABLE") + assert sql.count("ALTER TABLE") == 1 + assert sql.count("ADD COLUMN") == 1 + 
assert '"EVENT_TEST_TABLE"' in sql + assert '"COL1" NUMBER(19,0) NOT NULL' in sql + assert '"COL2" FLOAT NOT NULL' in sql + assert '"COL3" BOOLEAN NOT NULL' in sql + assert '"COL4" TIMESTAMP_TZ NOT NULL' in sql + assert '"COL5" VARCHAR' in sql + assert '"COL6" NUMBER(38,9) NOT NULL' in sql + assert '"COL7" BINARY' in sql + assert '"COL8" NUMBER(38,0)' in sql + assert '"COL9" VARIANT NOT NULL' in sql + assert '"COL10" DATE' in sql + + mod_table = deepcopy(TABLE_UPDATE) + mod_table.pop(0) + sql = snowflake_client._get_table_update_sql("event_test_table", mod_table, True)[0] + + assert '"COL1"' not in sql + assert '"COL2" FLOAT NOT NULL' in sql + + +def test_create_table_with_partition_and_cluster(snowflake_client: SnowflakeClient) -> None: + mod_update = deepcopy(TABLE_UPDATE) + # timestamp + mod_update[3]["partition"] = True + mod_update[4]["cluster"] = True + mod_update[1]["cluster"] = True + statements = snowflake_client._get_table_update_sql("event_test_table", mod_update, False) + assert len(statements) == 1 + sql = statements[0] + + # TODO: Can't parse cluster by + # sqlfluff.parse(sql, dialect="snowflake") + + # clustering must be the last + assert sql.endswith('CLUSTER BY ("COL2","COL5")') diff --git a/tests/load/clickhouse/test_utls.py b/tests/load/clickhouse/test_utls.py new file mode 100644 index 0000000000..e4730a6f20 --- /dev/null +++ b/tests/load/clickhouse/test_utls.py @@ -0,0 +1,95 @@ +import pytest + +from dlt.destinations.impl.clickhouse.utils import convert_storage_url_to_http + + +def test_convert_s3_url_to_http() -> None: + s3_url: str = "s3://my-bucket/path/to/file.txt" + expected_http_url: str = "http://my-bucket.s3.amazonaws.com/path/to/file.txt" + assert convert_storage_url_to_http(s3_url) == expected_http_url + + +def test_convert_s3_url_to_https() -> None: + s3_url: str = "s3://my-bucket/path/to/file.txt" + expected_https_url: str = "https://my-bucket.s3.amazonaws.com/path/to/file.txt" + assert convert_storage_url_to_http(s3_url, use_https=True) == expected_https_url + + +def test_convert_gs_url_to_http() -> None: + gs_url: str = "gs://my-bucket/path/to/file.txt" + expected_http_url: str = "http://my-bucket.storage.googleapis.com/path/to/file.txt" + assert convert_storage_url_to_http(gs_url) == expected_http_url + gcs_url = "gcs://my-bucket/path/to/file.txt" + expected_http_url = "http://my-bucket.storage.googleapis.com/path/to/file.txt" + assert convert_storage_url_to_http(gcs_url) == expected_http_url + + +def test_convert_gs_url_to_https() -> None: + gs_url: str = "gs://my-bucket/path/to/file.txt" + expected_https_url: str = "https://my-bucket.storage.googleapis.com/path/to/file.txt" + assert convert_storage_url_to_http(gs_url, use_https=True) == expected_https_url + gcs_url = "gcs://my-bucket/path/to/file.txt" + expected_https_url = "https://my-bucket.storage.googleapis.com/path/to/file.txt" + assert convert_storage_url_to_http(gcs_url, use_https=True) == expected_https_url + + +def test_convert_s3_url_to_http_with_region() -> None: + s3_url: str = "s3://my-bucket/path/to/file.txt" + expected_http_url: str = "http://my-bucket.s3-us-west-2.amazonaws.com/path/to/file.txt" + assert convert_storage_url_to_http(s3_url, region="us-west-2") == expected_http_url + + +def test_convert_s3_url_to_https_with_region() -> None: + s3_url: str = "s3://my-bucket/path/to/file.txt" + expected_https_url: str = "https://my-bucket.s3-us-east-1.amazonaws.com/path/to/file.txt" + assert ( + convert_storage_url_to_http(s3_url, use_https=True, region="us-east-1") + == expected_https_url 
+ ) + + +def test_convert_s3_url_to_http_with_endpoint() -> None: + s3_url: str = "s3://my-bucket/path/to/file.txt" + expected_http_url: str = "http://my-bucket.s3.custom-endpoint.com/path/to/file.txt" + assert ( + convert_storage_url_to_http(s3_url, endpoint="s3.custom-endpoint.com") == expected_http_url + ) + + +def test_convert_s3_url_to_https_with_endpoint() -> None: + s3_url: str = "s3://my-bucket/path/to/file.txt" + expected_https_url: str = "https://my-bucket.s3.custom-endpoint.com/path/to/file.txt" + assert ( + convert_storage_url_to_http(s3_url, use_https=True, endpoint="s3.custom-endpoint.com") + == expected_https_url + ) + + +def test_convert_gs_url_to_http_with_endpoint() -> None: + gs_url: str = "gs://my-bucket/path/to/file.txt" + expected_http_url: str = "http://my-bucket.custom-endpoint.com/path/to/file.txt" + assert convert_storage_url_to_http(gs_url, endpoint="custom-endpoint.com") == expected_http_url + gcs_url = "gcs://my-bucket/path/to/file.txt" + expected_http_url = "http://my-bucket.custom-endpoint.com/path/to/file.txt" + assert convert_storage_url_to_http(gcs_url, endpoint="custom-endpoint.com") == expected_http_url + + +def test_convert_gs_url_to_https_with_endpoint() -> None: + gs_url: str = "gs://my-bucket/path/to/file.txt" + expected_https_url: str = "https://my-bucket.custom-endpoint.com/path/to/file.txt" + assert ( + convert_storage_url_to_http(gs_url, use_https=True, endpoint="custom-endpoint.com") + == expected_https_url + ) + gcs_url = "gcs://my-bucket/path/to/file.txt" + expected_https_url = "https://my-bucket.custom-endpoint.com/path/to/file.txt" + assert ( + convert_storage_url_to_http(gcs_url, use_https=True, endpoint="custom-endpoint.com") + == expected_https_url + ) + + +def test_invalid_url_format() -> None: + with pytest.raises(Exception) as exc_info: + convert_storage_url_to_http("invalid-url") + assert str(exc_info.value) == "Error converting storage URL to HTTP protocol: 'invalid-url'" diff --git a/tests/utils.py b/tests/utils.py index 924f44de73..e36641ca71 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -48,6 +48,7 @@ "destination", "synapse", "databricks", + "clickhouse", } NON_SQL_DESTINATIONS = {"filesystem", "weaviate", "dummy", "motherduck", "qdrant", "destination"} SQL_DESTINATIONS = IMPLEMENTED_DESTINATIONS - NON_SQL_DESTINATIONS From 57ceeeebbce49d96ba0c61028e42d3018ddb16ea Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Mon, 18 Mar 2024 18:42:57 +0200 Subject: [PATCH 020/127] Refactor Clickhouse utilities and update tests Signed-off-by: Marcel Coetzee --- dlt/destinations/impl/clickhouse/__init__.py | 4 +- .../impl/clickhouse/clickhouse.py | 78 +++++++--------- dlt/destinations/impl/clickhouse/utils.py | 46 +++++++++- .../test_clickhouse_configuration.py | 7 +- tests/load/clickhouse/test_utls.py | 88 +++++++++++++++---- 5 files changed, 153 insertions(+), 70 deletions(-) diff --git a/dlt/destinations/impl/clickhouse/__init__.py b/dlt/destinations/impl/clickhouse/__init__.py index 88ea37d014..acbb08ac9a 100644 --- a/dlt/destinations/impl/clickhouse/__init__.py +++ b/dlt/destinations/impl/clickhouse/__init__.py @@ -7,9 +7,9 @@ def capabilities() -> DestinationCapabilitiesContext: caps = DestinationCapabilitiesContext() caps.preferred_loader_file_format = "jsonl" - caps.supported_loader_file_formats = ["jsonl", "parquet", "arrow"] + caps.supported_loader_file_formats = ["jsonl"] caps.preferred_staging_file_format = "jsonl" - caps.supported_staging_file_formats = ["jsonl", "parquet", "arrow"] + caps.supported_staging_file_formats = 
["jsonl"] caps.escape_identifier = escape_clickhouse_identifier caps.escape_literal = escape_clickhouse_literal diff --git a/dlt/destinations/impl/clickhouse/clickhouse.py b/dlt/destinations/impl/clickhouse/clickhouse.py index 48e7007a3c..d8a56eb9ff 100644 --- a/dlt/destinations/impl/clickhouse/clickhouse.py +++ b/dlt/destinations/impl/clickhouse/clickhouse.py @@ -1,11 +1,10 @@ from copy import deepcopy from typing import ClassVar, Optional, Dict, List, Sequence -from urllib.parse import urlparse, urlunparse +from urllib.parse import urlparse from dlt.common.configuration.specs import ( CredentialsConfiguration, AwsCredentialsWithoutDefaults, - AzureCredentialsWithoutDefaults, ) from dlt.common.destination import DestinationCapabilitiesContext from dlt.common.destination.reference import ( @@ -26,6 +25,11 @@ ClickhouseClientConfiguration, ) from dlt.destinations.impl.clickhouse.sql_client import ClickhouseSqlClient +from dlt.destinations.impl.clickhouse.utils import ( + convert_storage_url_to_http_url, + render_s3_table_function, + render_azure_blob_storage_table_function, +) from dlt.destinations.job_client_impl import ( SqlJobClientWithStaging, SqlJobClientBase, @@ -107,55 +111,39 @@ def __init__( file_name = ( FileStorage.get_file_name_from_file_path(bucket_path) if bucket_path else file_name ) - credentials_clause = "" - files_clause = "" - if bucket_path: bucket_url = urlparse(bucket_path) + bucket_http_url = convert_storage_url_to_http_url(bucket_url) bucket_scheme = bucket_url.scheme - # TODO: convert all object storage endpoints to http protocol. - if ( - bucket_scheme == "s3" - and staging_credentials - and isinstance(staging_credentials, AwsCredentialsWithoutDefaults) - ): - credentials_clause = f"""CREDENTIALS=(AWS_KEY_ID='{staging_credentials.aws_access_key_id}' AWS_SECRET_KEY='{staging_credentials.aws_secret_access_key}')""" - from_clause = f"FROM '{bucket_path}'" - elif ( - bucket_scheme in ("az", "abfs") - and staging_credentials - and isinstance(staging_credentials, AzureCredentialsWithoutDefaults) - ): - # Explicit azure credentials are needed to load from bucket without a named stage - credentials_clause = f"CREDENTIALS=(AZURE_SAS_TOKEN='?{staging_credentials.azure_storage_sas_token}')" - # Converts an az:/// to azure://.blob.core.windows.net// as required by Clickhouse. - _path = f"/{bucket_url.netloc}{bucket_url.path}" - bucket_path = urlunparse( - bucket_url._replace( - scheme="azure", - netloc=f"{staging_credentials.azure_storage_account_name}.blob.core.windows.net", - path=_path, + + table_function: str + + if bucket_scheme in ("s3", "gs", "gcs"): + if isinstance(staging_credentials, AwsCredentialsWithoutDefaults): + # Authenticated access. + table_function = render_s3_table_function( + bucket_http_url, + staging_credentials.aws_secret_access_key, + staging_credentials.aws_secret_access_key, ) - ) - from_clause = f"FROM '{bucket_path}'" - else: - # Ensure that gcs bucket path starts with gcs://; this is a requirement of Clickhouse. - bucket_path = bucket_path.replace("gs://", "gcs://") - from_clause = f"FROM @{stage_name}/" - files_clause = f"FILES = ('{urlparse(bucket_path).path.lstrip('/')}')" + else: + # Unsigned access. + table_function = render_s3_table_function(bucket_http_url) + elif bucket_scheme in ("az", "abfs"): + if isinstance(staging_credentials, AwsCredentialsWithoutDefaults): + # Authenticated access. 
+ table_function = render_azure_blob_storage_table_function( + bucket_http_url, + staging_credentials.aws_secret_access_key, + staging_credentials.aws_secret_access_key, + ) + else: + # Unsigned access. + table_function = render_azure_blob_storage_table_function(bucket_http_url) else: - # This means we have a local file. - if not stage_name: - # Use implicit table stage by default: "SCHEMA_NAME"."%TABLE_NAME". - stage_name = client.make_qualified_table_name(f"%{table_name}") - stage_file_path = f'@{stage_name}/"{load_id}"/{file_name}' - from_clause = f"FROM {stage_file_path}" - - # Decide on source format, stage_file_path will either be a local file or a bucket path. - source_format = "( TYPE = 'JSON', BINARY_FORMAT = 'BASE64' )" - if file_name.endswith("parquet"): - source_format = "(TYPE = 'PARQUET', BINARY_AS_TEXT = FALSE)" + # Local file. + raise NotImplementedError with client.begin_transaction(): # PUT and COPY in one transaction if local file, otherwise only copy. diff --git a/dlt/destinations/impl/clickhouse/utils.py b/dlt/destinations/impl/clickhouse/utils.py index 56ef29599b..543a07753b 100644 --- a/dlt/destinations/impl/clickhouse/utils.py +++ b/dlt/destinations/impl/clickhouse/utils.py @@ -1,11 +1,22 @@ -from urllib.parse import urlparse +from typing import Union, Optional, Literal +from urllib.parse import urlparse, ParseResult +from jinja2 import Template -def convert_storage_url_to_http( - url: str, use_https: bool = False, endpoint: str = None, region: str = None + +S3_TABLE_FUNCTION_FILE_FORMATS = Literal["jsonl", "parquet"] + + +def convert_storage_url_to_http_url( + url: Union[str, ParseResult], use_https: bool = False, endpoint: str = None, region: str = None ) -> str: try: - parsed_url = urlparse(url) + if isinstance(url, str): + parsed_url = urlparse(url) + elif isinstance(url, ParseResult): + parsed_url = url + else: + raise TypeError("Invalid URL type. Expected str or ParseResult.") bucket_name = parsed_url.netloc object_key = parsed_url.path.lstrip("/") @@ -16,6 +27,7 @@ def convert_storage_url_to_http( elif region and parsed_url.scheme == "s3": domain = f"s3-{region}.amazonaws.com" else: + # TODO: Incorporate dlt.config endpoint. 
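# Concretely, the scheme-to-domain lookup below rebuilds object storage URIs as
# virtual-hosted-style HTTP(S) URLs, e.g. (bucket and key are made up):
#   "s3://my-bucket/load_1/file.jsonl" -> "http://my-bucket.s3.amazonaws.com/load_1/file.jsonl"
#   "gs://my-bucket/load_1/file.jsonl" -> "http://my-bucket.storage.googleapis.com/load_1/file.jsonl"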
storage_domains = { "s3": "s3.amazonaws.com", "gs": "storage.googleapis.com", @@ -28,3 +40,29 @@ def convert_storage_url_to_http( except Exception as e: raise Exception(f"Error converting storage URL to HTTP protocol: '{url}'") from e + + +def render_s3_table_function( + url: str, + access_key_id: Optional[str] = None, + secret_access_key: Optional[str] = None, + file_format: Optional[S3_TABLE_FUNCTION_FILE_FORMATS] = "jsonl", +) -> str: + if file_format not in ["parquet", "jsonl"]: + raise ValueError("Clickhouse s3/gcs staging only supports 'parquet' and 'jsonl'.") + + format_mapping = {"jsonl": "JSONEachRow", "parquet": "Parquet"} + clickhouse_format = format_mapping[file_format] + + template = Template("""s3('{{ url }}'{% if access_key_id and secret_access_key %},'{{ access_key_id }}','{{ secret_access_key }}'{% else %},NOSIGN{% endif %},'{{ clickhouse_format }}')""") + + return template.render( + url=url, + access_key_id=access_key_id, + secret_access_key=secret_access_key, + clickhouse_format=clickhouse_format, + ).strip() + + +def render_azure_blob_storage_table_function(): + raise NotImplementedError diff --git a/tests/load/clickhouse/test_clickhouse_configuration.py b/tests/load/clickhouse/test_clickhouse_configuration.py index d9d7eb3c27..89b78bfe7b 100644 --- a/tests/load/clickhouse/test_clickhouse_configuration.py +++ b/tests/load/clickhouse/test_clickhouse_configuration.py @@ -2,6 +2,8 @@ import pytest from pathlib import Path from dlt.common.libs.sql_alchemy import make_url +from dlt.destinations.impl.clickhouse.configuration import ClickhouseCredentials + pytest.importorskip("clickhouse") @@ -17,11 +19,10 @@ from tests.common.configuration.utils import environment -@ -def test_connection_string_with_all_params(g -> None: +def test_connection_string_with_all_params() -> None: url = "snowflake://user1:pass1@host1/db1?warehouse=warehouse1&role=role1&private_key=cGs%3D&private_key_passphrase=paphr" - creds = SnowflakeCredentials() + creds = ClickhouseCredentials() creds.parse_native_representation(url) assert creds.database == "db1" diff --git a/tests/load/clickhouse/test_utls.py b/tests/load/clickhouse/test_utls.py index e4730a6f20..5176899775 100644 --- a/tests/load/clickhouse/test_utls.py +++ b/tests/load/clickhouse/test_utls.py @@ -1,49 +1,52 @@ import pytest -from dlt.destinations.impl.clickhouse.utils import convert_storage_url_to_http +from dlt.destinations.impl.clickhouse.utils import ( + convert_storage_url_to_http_url, + render_s3_table_function, +) def test_convert_s3_url_to_http() -> None: s3_url: str = "s3://my-bucket/path/to/file.txt" expected_http_url: str = "http://my-bucket.s3.amazonaws.com/path/to/file.txt" - assert convert_storage_url_to_http(s3_url) == expected_http_url + assert convert_storage_url_to_http_url(s3_url) == expected_http_url def test_convert_s3_url_to_https() -> None: s3_url: str = "s3://my-bucket/path/to/file.txt" expected_https_url: str = "https://my-bucket.s3.amazonaws.com/path/to/file.txt" - assert convert_storage_url_to_http(s3_url, use_https=True) == expected_https_url + assert convert_storage_url_to_http_url(s3_url, use_https=True) == expected_https_url def test_convert_gs_url_to_http() -> None: gs_url: str = "gs://my-bucket/path/to/file.txt" expected_http_url: str = "http://my-bucket.storage.googleapis.com/path/to/file.txt" - assert convert_storage_url_to_http(gs_url) == expected_http_url + assert convert_storage_url_to_http_url(gs_url) == expected_http_url gcs_url = "gcs://my-bucket/path/to/file.txt" expected_http_url = 
"http://my-bucket.storage.googleapis.com/path/to/file.txt" - assert convert_storage_url_to_http(gcs_url) == expected_http_url + assert convert_storage_url_to_http_url(gcs_url) == expected_http_url def test_convert_gs_url_to_https() -> None: gs_url: str = "gs://my-bucket/path/to/file.txt" expected_https_url: str = "https://my-bucket.storage.googleapis.com/path/to/file.txt" - assert convert_storage_url_to_http(gs_url, use_https=True) == expected_https_url + assert convert_storage_url_to_http_url(gs_url, use_https=True) == expected_https_url gcs_url = "gcs://my-bucket/path/to/file.txt" expected_https_url = "https://my-bucket.storage.googleapis.com/path/to/file.txt" - assert convert_storage_url_to_http(gcs_url, use_https=True) == expected_https_url + assert convert_storage_url_to_http_url(gcs_url, use_https=True) == expected_https_url def test_convert_s3_url_to_http_with_region() -> None: s3_url: str = "s3://my-bucket/path/to/file.txt" expected_http_url: str = "http://my-bucket.s3-us-west-2.amazonaws.com/path/to/file.txt" - assert convert_storage_url_to_http(s3_url, region="us-west-2") == expected_http_url + assert convert_storage_url_to_http_url(s3_url, region="us-west-2") == expected_http_url def test_convert_s3_url_to_https_with_region() -> None: s3_url: str = "s3://my-bucket/path/to/file.txt" expected_https_url: str = "https://my-bucket.s3-us-east-1.amazonaws.com/path/to/file.txt" assert ( - convert_storage_url_to_http(s3_url, use_https=True, region="us-east-1") + convert_storage_url_to_http_url(s3_url, use_https=True, region="us-east-1") == expected_https_url ) @@ -52,7 +55,8 @@ def test_convert_s3_url_to_http_with_endpoint() -> None: s3_url: str = "s3://my-bucket/path/to/file.txt" expected_http_url: str = "http://my-bucket.s3.custom-endpoint.com/path/to/file.txt" assert ( - convert_storage_url_to_http(s3_url, endpoint="s3.custom-endpoint.com") == expected_http_url + convert_storage_url_to_http_url(s3_url, endpoint="s3.custom-endpoint.com") + == expected_http_url ) @@ -60,7 +64,7 @@ def test_convert_s3_url_to_https_with_endpoint() -> None: s3_url: str = "s3://my-bucket/path/to/file.txt" expected_https_url: str = "https://my-bucket.s3.custom-endpoint.com/path/to/file.txt" assert ( - convert_storage_url_to_http(s3_url, use_https=True, endpoint="s3.custom-endpoint.com") + convert_storage_url_to_http_url(s3_url, use_https=True, endpoint="s3.custom-endpoint.com") == expected_https_url ) @@ -68,28 +72,80 @@ def test_convert_s3_url_to_https_with_endpoint() -> None: def test_convert_gs_url_to_http_with_endpoint() -> None: gs_url: str = "gs://my-bucket/path/to/file.txt" expected_http_url: str = "http://my-bucket.custom-endpoint.com/path/to/file.txt" - assert convert_storage_url_to_http(gs_url, endpoint="custom-endpoint.com") == expected_http_url + assert ( + convert_storage_url_to_http_url(gs_url, endpoint="custom-endpoint.com") == expected_http_url + ) gcs_url = "gcs://my-bucket/path/to/file.txt" expected_http_url = "http://my-bucket.custom-endpoint.com/path/to/file.txt" - assert convert_storage_url_to_http(gcs_url, endpoint="custom-endpoint.com") == expected_http_url + assert ( + convert_storage_url_to_http_url(gcs_url, endpoint="custom-endpoint.com") + == expected_http_url + ) def test_convert_gs_url_to_https_with_endpoint() -> None: gs_url: str = "gs://my-bucket/path/to/file.txt" expected_https_url: str = "https://my-bucket.custom-endpoint.com/path/to/file.txt" assert ( - convert_storage_url_to_http(gs_url, use_https=True, endpoint="custom-endpoint.com") + 
convert_storage_url_to_http_url(gs_url, use_https=True, endpoint="custom-endpoint.com") == expected_https_url ) gcs_url = "gcs://my-bucket/path/to/file.txt" expected_https_url = "https://my-bucket.custom-endpoint.com/path/to/file.txt" assert ( - convert_storage_url_to_http(gcs_url, use_https=True, endpoint="custom-endpoint.com") + convert_storage_url_to_http_url(gcs_url, use_https=True, endpoint="custom-endpoint.com") == expected_https_url ) +def test_render_with_credentials_jsonl() -> None: + url = "https://example.com/data.jsonl" + access_key_id = "test_access_key" + secret_access_key = "test_secret_key" + file_format = "jsonl" + expected_output = """s3('https://example.com/data.jsonl','test_access_key','test_secret_key','JSONEachRow')""" + assert ( + render_s3_table_function(url, access_key_id, secret_access_key, file_format) # type: ignore[arg-type] + == expected_output + ) + + +def test_render_with_credentials_parquet() -> None: + url = "https://example.com/data.parquet" + access_key_id = "test_access_key" + secret_access_key = "test_secret_key" + file_format = "parquet" + expected_output = """s3('https://example.com/data.parquet','test_access_key','test_secret_key','Parquet')""" + assert ( + render_s3_table_function(url, access_key_id, secret_access_key, file_format) # type: ignore[arg-type] + == expected_output + ) + + +def test_render_without_credentials() -> None: + url = "https://example.com/data.jsonl" + file_format = "jsonl" + expected_output = """s3('https://example.com/data.jsonl',NOSIGN,'JSONEachRow')""" + assert render_s3_table_function(url, file_format=file_format) == expected_output # type: ignore[arg-type] + + +def test_render_invalid_file_format() -> None: + url = "https://example.com/data.unknown" + access_key_id = "test_access_key" + secret_access_key = "test_secret_key" + file_format = "unknown" + with pytest.raises(ValueError) as excinfo: + render_s3_table_function(url, access_key_id, secret_access_key, file_format) # type: ignore[arg-type] + assert "Clickhouse s3/gcs staging only supports 'parquet' and 'jsonl'." 
== str(excinfo.value) + + def test_invalid_url_format() -> None: with pytest.raises(Exception) as exc_info: - convert_storage_url_to_http("invalid-url") + convert_storage_url_to_http_url("invalid-url") assert str(exc_info.value) == "Error converting storage URL to HTTP protocol: 'invalid-url'" + + +def test_render_missing_url() -> None: + with pytest.raises(TypeError) as excinfo: + render_s3_table_function() # type: ignore + assert "missing 1 required positional argument: 'url'" in str(excinfo.value) From 6cdf086e8412f993c6d8a082558129da24ad6c2a Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Mon, 18 Mar 2024 23:08:59 +0200 Subject: [PATCH 021/127] Refactor URL conversion and staging for Clickhouse Signed-off-by: Marcel Coetzee --- .../impl/clickhouse/clickhouse.py | 106 ++++++++++-------- dlt/destinations/impl/clickhouse/utils.py | 14 +-- tests/load/clickhouse/test_utls.py | 33 +++--- 3 files changed, 79 insertions(+), 74 deletions(-) diff --git a/dlt/destinations/impl/clickhouse/clickhouse.py b/dlt/destinations/impl/clickhouse/clickhouse.py index d8a56eb9ff..0170ac3d3b 100644 --- a/dlt/destinations/impl/clickhouse/clickhouse.py +++ b/dlt/destinations/impl/clickhouse/clickhouse.py @@ -1,3 +1,4 @@ +import os from copy import deepcopy from typing import ClassVar, Optional, Dict, List, Sequence from urllib.parse import urlparse @@ -5,6 +6,7 @@ from dlt.common.configuration.specs import ( CredentialsConfiguration, AwsCredentialsWithoutDefaults, + AzureCredentialsWithoutDefaults, ) from dlt.common.destination import DestinationCapabilitiesContext from dlt.common.destination.reference import ( @@ -26,9 +28,8 @@ ) from dlt.destinations.impl.clickhouse.sql_client import ClickhouseSqlClient from dlt.destinations.impl.clickhouse.utils import ( - convert_storage_url_to_http_url, + convert_storage_to_http_scheme, render_s3_table_function, - render_azure_blob_storage_table_function, ) from dlt.destinations.job_client_impl import ( SqlJobClientWithStaging, @@ -94,7 +95,6 @@ def __init__( self, file_path: str, table_name: str, - load_id: str, client: ClickhouseSqlClient, staging_credentials: Optional[CredentialsConfiguration] = None, ) -> None: @@ -111,54 +111,63 @@ def __init__( file_name = ( FileStorage.get_file_name_from_file_path(bucket_path) if bucket_path else file_name ) + file_extension = os.path.splitext(file_name)[1].lower() + if file_extension not in ["parquet", "jsonl"]: + raise ValueError("Clickhouse staging only supports 'parquet' and 'jsonl' file formats.") - if bucket_path: - bucket_url = urlparse(bucket_path) - bucket_http_url = convert_storage_url_to_http_url(bucket_url) - bucket_scheme = bucket_url.scheme - - table_function: str - - if bucket_scheme in ("s3", "gs", "gcs"): - if isinstance(staging_credentials, AwsCredentialsWithoutDefaults): - # Authenticated access. - table_function = render_s3_table_function( - bucket_http_url, - staging_credentials.aws_secret_access_key, - staging_credentials.aws_secret_access_key, - ) - else: - # Unsigned access. - table_function = render_s3_table_function(bucket_http_url) - elif bucket_scheme in ("az", "abfs"): - if isinstance(staging_credentials, AwsCredentialsWithoutDefaults): - # Authenticated access. - table_function = render_azure_blob_storage_table_function( - bucket_http_url, - staging_credentials.aws_secret_access_key, - staging_credentials.aws_secret_access_key, - ) - else: - # Unsigned access. - table_function = render_azure_blob_storage_table_function(bucket_http_url) - else: - # Local file. 
- raise NotImplementedError + if not bucket_path: + # Local filesystem. + raise NotImplementedError("Only object storage is supported.") - with client.begin_transaction(): - # PUT and COPY in one transaction if local file, otherwise only copy. - if not bucket_path: - client.execute_sql( - f'PUT file://{file_path} @{stage_name}/"{load_id}" OVERWRITE = TRUE,' - " AUTO_COMPRESS = FALSE" + bucket_url = urlparse(bucket_path) + bucket_scheme = bucket_url.scheme + + table_function: str + + if bucket_scheme in ("s3", "gs", "gcs"): + bucket_http_url = convert_storage_to_http_scheme(bucket_url) + + table_function = ( + render_s3_table_function( + bucket_http_url, + staging_credentials.aws_secret_access_key, + staging_credentials.aws_secret_access_key, + file_format=file_extension, # type: ignore[arg-type] + ) + if isinstance(staging_credentials, AwsCredentialsWithoutDefaults) + else render_s3_table_function( + bucket_http_url, + file_format=file_extension, # type: ignore[arg-type] + ) + ) + elif bucket_scheme in ("az", "abfs"): + if isinstance(staging_credentials, AzureCredentialsWithoutDefaults): + # Authenticated access. + account_name = staging_credentials.azure_storage_account_name + storage_account_url = ( + f"{staging_credentials.azure_storage_account_name}.blob.core.windows.net" ) - client.execute_sql(f"""COPY INTO {qualified_table_name} - {from_clause} - {files_clause} - {credentials_clause} - FILE_FORMAT = {source_format} - MATCH_BY_COLUMN_NAME='CASE_INSENSITIVE' - """) + account_key = staging_credentials.azure_storage_sas_token + container_name = bucket_url.netloc + blobpath = bucket_url.path + + format_mapping = {"jsonl": "JSONEachRow", "parquet": "Parquet"} + clickhouse_format = format_mapping[file_extension] + + table_function = ( + f"azureBlobStorage('{storage_account_url}','{container_name}','{ blobpath }','{ account_name }','{ account_key }','{ clickhouse_format}')" + ) + + else: + # Unsigned access. + raise NotImplementedError( + "Unsigned Azure Blob Storage access from Clickhouse isn't supported as yet." 
+ ) + + with client.begin_transaction(): + client.execute_sql( + f"""INSERT INTO {qualified_table_name} SELECT * FROM {table_function}""" + ) def state(self) -> TLoadJobState: return "completed" @@ -188,7 +197,6 @@ def start_file_load(self, table: TTableSchema, file_path: str, load_id: str) -> return super().start_file_load(table, file_path, load_id) or ClickhouseLoadJob( file_path, table["name"], - load_id, self.sql_client, staging_credentials=( self.config.staging_config.credentials if self.config.staging_config else None diff --git a/dlt/destinations/impl/clickhouse/utils.py b/dlt/destinations/impl/clickhouse/utils.py index 543a07753b..6297712943 100644 --- a/dlt/destinations/impl/clickhouse/utils.py +++ b/dlt/destinations/impl/clickhouse/utils.py @@ -1,5 +1,5 @@ from typing import Union, Optional, Literal -from urllib.parse import urlparse, ParseResult +from urllib.parse import urlparse, ParseResult, urlunparse from jinja2 import Template @@ -7,7 +7,7 @@ S3_TABLE_FUNCTION_FILE_FORMATS = Literal["jsonl", "parquet"] -def convert_storage_url_to_http_url( +def convert_storage_to_http_scheme( url: Union[str, ParseResult], use_https: bool = False, endpoint: str = None, region: str = None ) -> str: try: @@ -33,11 +33,9 @@ def convert_storage_url_to_http_url( "gs": "storage.googleapis.com", "gcs": "storage.googleapis.com", } - domain = storage_domains[parsed_url.scheme] return f"{protocol}://{bucket_name}.{domain}/{object_key}" - except Exception as e: raise Exception(f"Error converting storage URL to HTTP protocol: '{url}'") from e @@ -54,7 +52,9 @@ def render_s3_table_function( format_mapping = {"jsonl": "JSONEachRow", "parquet": "Parquet"} clickhouse_format = format_mapping[file_format] - template = Template("""s3('{{ url }}'{% if access_key_id and secret_access_key %},'{{ access_key_id }}','{{ secret_access_key }}'{% else %},NOSIGN{% endif %},'{{ clickhouse_format }}')""") + template = Template( + """s3('{{ url }}'{% if access_key_id and secret_access_key %},'{{ access_key_id }}','{{ secret_access_key }}'{% else %},NOSIGN{% endif %},'{{ clickhouse_format }}')""" + ) return template.render( url=url, @@ -62,7 +62,3 @@ def render_s3_table_function( secret_access_key=secret_access_key, clickhouse_format=clickhouse_format, ).strip() - - -def render_azure_blob_storage_table_function(): - raise NotImplementedError diff --git a/tests/load/clickhouse/test_utls.py b/tests/load/clickhouse/test_utls.py index 5176899775..4d672fb7de 100644 --- a/tests/load/clickhouse/test_utls.py +++ b/tests/load/clickhouse/test_utls.py @@ -1,7 +1,7 @@ import pytest from dlt.destinations.impl.clickhouse.utils import ( - convert_storage_url_to_http_url, + convert_storage_to_http_scheme, render_s3_table_function, ) @@ -9,44 +9,44 @@ def test_convert_s3_url_to_http() -> None: s3_url: str = "s3://my-bucket/path/to/file.txt" expected_http_url: str = "http://my-bucket.s3.amazonaws.com/path/to/file.txt" - assert convert_storage_url_to_http_url(s3_url) == expected_http_url + assert convert_storage_to_http_scheme(s3_url) == expected_http_url def test_convert_s3_url_to_https() -> None: s3_url: str = "s3://my-bucket/path/to/file.txt" expected_https_url: str = "https://my-bucket.s3.amazonaws.com/path/to/file.txt" - assert convert_storage_url_to_http_url(s3_url, use_https=True) == expected_https_url + assert convert_storage_to_http_scheme(s3_url, use_https=True) == expected_https_url def test_convert_gs_url_to_http() -> None: gs_url: str = "gs://my-bucket/path/to/file.txt" expected_http_url: str = 
"http://my-bucket.storage.googleapis.com/path/to/file.txt" - assert convert_storage_url_to_http_url(gs_url) == expected_http_url + assert convert_storage_to_http_scheme(gs_url) == expected_http_url gcs_url = "gcs://my-bucket/path/to/file.txt" expected_http_url = "http://my-bucket.storage.googleapis.com/path/to/file.txt" - assert convert_storage_url_to_http_url(gcs_url) == expected_http_url + assert convert_storage_to_http_scheme(gcs_url) == expected_http_url def test_convert_gs_url_to_https() -> None: gs_url: str = "gs://my-bucket/path/to/file.txt" expected_https_url: str = "https://my-bucket.storage.googleapis.com/path/to/file.txt" - assert convert_storage_url_to_http_url(gs_url, use_https=True) == expected_https_url + assert convert_storage_to_http_scheme(gs_url, use_https=True) == expected_https_url gcs_url = "gcs://my-bucket/path/to/file.txt" expected_https_url = "https://my-bucket.storage.googleapis.com/path/to/file.txt" - assert convert_storage_url_to_http_url(gcs_url, use_https=True) == expected_https_url + assert convert_storage_to_http_scheme(gcs_url, use_https=True) == expected_https_url def test_convert_s3_url_to_http_with_region() -> None: s3_url: str = "s3://my-bucket/path/to/file.txt" expected_http_url: str = "http://my-bucket.s3-us-west-2.amazonaws.com/path/to/file.txt" - assert convert_storage_url_to_http_url(s3_url, region="us-west-2") == expected_http_url + assert convert_storage_to_http_scheme(s3_url, region="us-west-2") == expected_http_url def test_convert_s3_url_to_https_with_region() -> None: s3_url: str = "s3://my-bucket/path/to/file.txt" expected_https_url: str = "https://my-bucket.s3-us-east-1.amazonaws.com/path/to/file.txt" assert ( - convert_storage_url_to_http_url(s3_url, use_https=True, region="us-east-1") + convert_storage_to_http_scheme(s3_url, use_https=True, region="us-east-1") == expected_https_url ) @@ -55,7 +55,7 @@ def test_convert_s3_url_to_http_with_endpoint() -> None: s3_url: str = "s3://my-bucket/path/to/file.txt" expected_http_url: str = "http://my-bucket.s3.custom-endpoint.com/path/to/file.txt" assert ( - convert_storage_url_to_http_url(s3_url, endpoint="s3.custom-endpoint.com") + convert_storage_to_http_scheme(s3_url, endpoint="s3.custom-endpoint.com") == expected_http_url ) @@ -64,7 +64,7 @@ def test_convert_s3_url_to_https_with_endpoint() -> None: s3_url: str = "s3://my-bucket/path/to/file.txt" expected_https_url: str = "https://my-bucket.s3.custom-endpoint.com/path/to/file.txt" assert ( - convert_storage_url_to_http_url(s3_url, use_https=True, endpoint="s3.custom-endpoint.com") + convert_storage_to_http_scheme(s3_url, use_https=True, endpoint="s3.custom-endpoint.com") == expected_https_url ) @@ -73,12 +73,12 @@ def test_convert_gs_url_to_http_with_endpoint() -> None: gs_url: str = "gs://my-bucket/path/to/file.txt" expected_http_url: str = "http://my-bucket.custom-endpoint.com/path/to/file.txt" assert ( - convert_storage_url_to_http_url(gs_url, endpoint="custom-endpoint.com") == expected_http_url + convert_storage_to_http_scheme(gs_url, endpoint="custom-endpoint.com") == expected_http_url ) gcs_url = "gcs://my-bucket/path/to/file.txt" expected_http_url = "http://my-bucket.custom-endpoint.com/path/to/file.txt" assert ( - convert_storage_url_to_http_url(gcs_url, endpoint="custom-endpoint.com") + convert_storage_to_http_scheme(gcs_url, endpoint="custom-endpoint.com") == expected_http_url ) @@ -87,13 +87,13 @@ def test_convert_gs_url_to_https_with_endpoint() -> None: gs_url: str = "gs://my-bucket/path/to/file.txt" expected_https_url: str = 
"https://my-bucket.custom-endpoint.com/path/to/file.txt" assert ( - convert_storage_url_to_http_url(gs_url, use_https=True, endpoint="custom-endpoint.com") + convert_storage_to_http_scheme(gs_url, use_https=True, endpoint="custom-endpoint.com") == expected_https_url ) gcs_url = "gcs://my-bucket/path/to/file.txt" expected_https_url = "https://my-bucket.custom-endpoint.com/path/to/file.txt" assert ( - convert_storage_url_to_http_url(gcs_url, use_https=True, endpoint="custom-endpoint.com") + convert_storage_to_http_scheme(gcs_url, use_https=True, endpoint="custom-endpoint.com") == expected_https_url ) @@ -129,6 +129,7 @@ def test_render_without_credentials() -> None: assert render_s3_table_function(url, file_format=file_format) == expected_output # type: ignore[arg-type] + def test_render_invalid_file_format() -> None: url = "https://example.com/data.unknown" access_key_id = "test_access_key" @@ -141,7 +142,7 @@ def test_render_invalid_file_format() -> None: def test_invalid_url_format() -> None: with pytest.raises(Exception) as exc_info: - convert_storage_url_to_http_url("invalid-url") + convert_storage_to_http_scheme("invalid-url") assert str(exc_info.value) == "Error converting storage URL to HTTP protocol: 'invalid-url'" From 6606f5ed386594605cc0c0c060555b7935a98d6b Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Mon, 18 Mar 2024 23:32:07 +0200 Subject: [PATCH 022/127] Tests don't pass, but they are there #1055 Signed-off-by: Marcel Coetzee --- .../test_clickhouse_configuration.py | 118 ++---------------- .../test_clickhouse_table_builder.py | 47 +++---- tests/load/clickhouse/test_utls.py | 12 +- 3 files changed, 41 insertions(+), 136 deletions(-) diff --git a/tests/load/clickhouse/test_clickhouse_configuration.py b/tests/load/clickhouse/test_clickhouse_configuration.py index 89b78bfe7b..3406aad902 100644 --- a/tests/load/clickhouse/test_clickhouse_configuration.py +++ b/tests/load/clickhouse/test_clickhouse_configuration.py @@ -1,26 +1,18 @@ -import os -import pytest -from pathlib import Path -from dlt.common.libs.sql_alchemy import make_url -from dlt.destinations.impl.clickhouse.configuration import ClickhouseCredentials - - -pytest.importorskip("clickhouse") - from dlt.common.configuration.resolve import resolve_configuration -from dlt.common.configuration.exceptions import ConfigurationValueError +from dlt.common.libs.sql_alchemy import make_url from dlt.common.utils import digest128 - +from dlt.destinations.impl.clickhouse.configuration import ( + ClickhouseCredentials, + ClickhouseClientConfiguration, +) from dlt.destinations.impl.snowflake.configuration import ( SnowflakeClientConfiguration, SnowflakeCredentials, ) -from tests.common.configuration.utils import environment - def test_connection_string_with_all_params() -> None: - url = "snowflake://user1:pass1@host1/db1?warehouse=warehouse1&role=role1&private_key=cGs%3D&private_key_passphrase=paphr" + url = "clickhouse://user1:pass1@host1:9000/db1" creds = ClickhouseCredentials() creds.parse_native_representation(url) @@ -29,10 +21,7 @@ def test_connection_string_with_all_params() -> None: assert creds.username == "user1" assert creds.password == "pass1" assert creds.host == "host1" - assert creds.warehouse == "warehouse1" - assert creds.role == "role1" - assert creds.private_key == "cGs=" - assert creds.private_key_passphrase == "paphr" + assert creds.port == 9000 expected = make_url(url) @@ -40,99 +29,12 @@ def test_connection_string_with_all_params() -> None: assert make_url(creds.to_native_representation()) == expected -def 
test_to_connector_params() -> None: - # PEM key - pkey_str = Path("./tests/common/cases/secrets/encrypted-private-key").read_text("utf8") - - creds = SnowflakeCredentials() - creds.private_key = pkey_str # type: ignore[assignment] - creds.private_key_passphrase = "12345" # type: ignore[assignment] - creds.username = "user1" - creds.database = "db1" - creds.host = "host1" - creds.warehouse = "warehouse1" - creds.role = "role1" - - params = creds.to_connector_params() - - assert isinstance(params["private_key"], bytes) - params.pop("private_key") - - assert params == dict( - user="user1", - database="db1", - account="host1", - password=None, - warehouse="warehouse1", - role="role1", - ) - - # base64 encoded DER key - pkey_str = Path("./tests/common/cases/secrets/encrypted-private-key-base64").read_text("utf8") - - creds = SnowflakeCredentials() - creds.private_key = pkey_str # type: ignore[assignment] - creds.private_key_passphrase = "12345" # type: ignore[assignment] - creds.username = "user1" - creds.database = "db1" - creds.host = "host1" - creds.warehouse = "warehouse1" - creds.role = "role1" - - params = creds.to_connector_params() - - assert isinstance(params["private_key"], bytes) - params.pop("private_key") - - assert params == dict( - user="user1", - database="db1", - account="host1", - password=None, - warehouse="warehouse1", - role="role1", - ) - - -def test_snowflake_credentials_native_value(environment) -> None: - with pytest.raises(ConfigurationValueError): - resolve_configuration( - SnowflakeCredentials(), - explicit_value="snowflake://user1@host1/db1?warehouse=warehouse1&role=role1", - ) - # set password via env - os.environ["CREDENTIALS__PASSWORD"] = "pass" - c = resolve_configuration( - SnowflakeCredentials(), - explicit_value="snowflake://user1@host1/db1?warehouse=warehouse1&role=role1", - ) - assert c.is_resolved() - assert c.password == "pass" - # # but if password is specified - it is final - c = resolve_configuration( - SnowflakeCredentials(), - explicit_value="snowflake://user1:pass1@host1/db1?warehouse=warehouse1&role=role1", - ) - assert c.is_resolved() - assert c.password == "pass1" - - # set PK via env - del os.environ["CREDENTIALS__PASSWORD"] - os.environ["CREDENTIALS__PRIVATE_KEY"] = "pk" - c = resolve_configuration( - SnowflakeCredentials(), - explicit_value="snowflake://user1@host1/db1?warehouse=warehouse1&role=role1", - ) - assert c.is_resolved() - assert c.private_key == "pk" - - -def test_snowflake_configuration() -> None: +def test_clickhouse_configuration() -> None: # def empty fingerprint - assert SnowflakeClientConfiguration().fingerprint() == "" + assert ClickhouseClientConfiguration().fingerprint() == "" # based on host c = resolve_configuration( SnowflakeCredentials(), - explicit_value="snowflake://user1:pass@host1/db1?warehouse=warehouse1&role=role1", + explicit_value="clickhouse://user1:pass1@host1:9000/db1", ) assert SnowflakeClientConfiguration(credentials=c).fingerprint() == digest128("host1") diff --git a/tests/load/clickhouse/test_clickhouse_table_builder.py b/tests/load/clickhouse/test_clickhouse_table_builder.py index 1e80a61f1c..da6a23f668 100644 --- a/tests/load/clickhouse/test_clickhouse_table_builder.py +++ b/tests/load/clickhouse/test_clickhouse_table_builder.py @@ -3,33 +3,34 @@ import pytest import sqlfluff -from dlt.common.utils import uniq_id from dlt.common.schema import Schema -from dlt.destinations.impl.snowflake.snowflake import SnowflakeClient -from dlt.destinations.impl.snowflake.configuration import ( - 
SnowflakeClientConfiguration, - SnowflakeCredentials, +from dlt.common.utils import uniq_id +from dlt.destinations.impl.clickhouse.clickhouse import ClickhouseClient +from dlt.destinations.impl.clickhouse.configuration import ( + ClickhouseCredentials, + ClickhouseClientConfiguration, ) -from dlt.destinations.exceptions import DestinationSchemaWillNotUpdate - from tests.load.utils import TABLE_UPDATE, empty_schema @pytest.fixture -def snowflake_client(empty_schema: Schema) -> SnowflakeClient: - # return client without opening connection - creds = SnowflakeCredentials() - return SnowflakeClient( +def clickhouse_client(empty_schema: Schema) -> ClickhouseClient: + # Return a client without opening connection. + creds = ClickhouseCredentials() + return ClickhouseClient( empty_schema, - SnowflakeClientConfiguration(dataset_name="test_" + uniq_id(), credentials=creds), + ClickhouseClientConfiguration(dataset_name=f"test_{uniq_id()}", credentials=creds), ) -def test_create_table(snowflake_client: SnowflakeClient) -> None: - statements = snowflake_client._get_table_update_sql("event_test_table", TABLE_UPDATE, False) +pytest.mark.usefixtures("empty_schema") + + +def test_create_table(clickhouse_client: ClickhouseClient) -> None: + statements = clickhouse_client._get_table_update_sql("event_test_table", TABLE_UPDATE, False) assert len(statements) == 1 sql = statements[0] - sqlfluff.parse(sql, dialect="snowflake") + sqlfluff.parse(sql, dialect="clickhouse") assert sql.strip().startswith("CREATE TABLE") assert "EVENT_TEST_TABLE" in sql @@ -45,13 +46,13 @@ def test_create_table(snowflake_client: SnowflakeClient) -> None: assert '"COL10" DATE NOT NULL' in sql -def test_alter_table(snowflake_client: SnowflakeClient) -> None: - statements = snowflake_client._get_table_update_sql("event_test_table", TABLE_UPDATE, True) +def test_alter_table(clickhouse_client: ClickhouseClient) -> None: + statements = clickhouse_client._get_table_update_sql("event_test_table", TABLE_UPDATE, True) assert len(statements) == 1 sql = statements[0] - # TODO: sqlfluff doesn't parse snowflake multi ADD COLUMN clause correctly - # sqlfluff.parse(sql, dialect='snowflake') + # TODO: sqlfluff doesn't parse clickhouse multi ADD COLUMN clause correctly + # sqlfluff.parse(sql, dialect='clickhouse') assert sql.startswith("ALTER TABLE") assert sql.count("ALTER TABLE") == 1 @@ -70,24 +71,24 @@ def test_alter_table(snowflake_client: SnowflakeClient) -> None: mod_table = deepcopy(TABLE_UPDATE) mod_table.pop(0) - sql = snowflake_client._get_table_update_sql("event_test_table", mod_table, True)[0] + sql = clickhouse_client._get_table_update_sql("event_test_table", mod_table, True)[0] assert '"COL1"' not in sql assert '"COL2" FLOAT NOT NULL' in sql -def test_create_table_with_partition_and_cluster(snowflake_client: SnowflakeClient) -> None: +def test_create_table_with_partition_and_cluster(clickhouse_client: ClickhouseClient) -> None: mod_update = deepcopy(TABLE_UPDATE) # timestamp mod_update[3]["partition"] = True mod_update[4]["cluster"] = True mod_update[1]["cluster"] = True - statements = snowflake_client._get_table_update_sql("event_test_table", mod_update, False) + statements = clickhouse_client._get_table_update_sql("event_test_table", mod_update, False) assert len(statements) == 1 sql = statements[0] # TODO: Can't parse cluster by - # sqlfluff.parse(sql, dialect="snowflake") + # sqlfluff.parse(sql, dialect="clickhouse") # clustering must be the last assert sql.endswith('CLUSTER BY ("COL2","COL5")') diff --git 
a/tests/load/clickhouse/test_utls.py b/tests/load/clickhouse/test_utls.py index 4d672fb7de..9c9c862623 100644 --- a/tests/load/clickhouse/test_utls.py +++ b/tests/load/clickhouse/test_utls.py @@ -78,8 +78,7 @@ def test_convert_gs_url_to_http_with_endpoint() -> None: gcs_url = "gcs://my-bucket/path/to/file.txt" expected_http_url = "http://my-bucket.custom-endpoint.com/path/to/file.txt" assert ( - convert_storage_to_http_scheme(gcs_url, endpoint="custom-endpoint.com") - == expected_http_url + convert_storage_to_http_scheme(gcs_url, endpoint="custom-endpoint.com") == expected_http_url ) @@ -103,7 +102,9 @@ def test_render_with_credentials_jsonl() -> None: access_key_id = "test_access_key" secret_access_key = "test_secret_key" file_format = "jsonl" - expected_output = """s3('https://example.com/data.jsonl','test_access_key','test_secret_key','JSONEachRow')""" + expected_output = ( + """s3('https://example.com/data.jsonl','test_access_key','test_secret_key','JSONEachRow')""" + ) assert ( render_s3_table_function(url, access_key_id, secret_access_key, file_format) # type: ignore[arg-type] == expected_output @@ -115,7 +116,9 @@ def test_render_with_credentials_parquet() -> None: access_key_id = "test_access_key" secret_access_key = "test_secret_key" file_format = "parquet" - expected_output = """s3('https://example.com/data.parquet','test_access_key','test_secret_key','Parquet')""" + expected_output = ( + """s3('https://example.com/data.parquet','test_access_key','test_secret_key','Parquet')""" + ) assert ( render_s3_table_function(url, access_key_id, secret_access_key, file_format) # type: ignore[arg-type] == expected_output @@ -129,7 +132,6 @@ def test_render_without_credentials() -> None: assert render_s3_table_function(url, file_format=file_format) == expected_output # type: ignore[arg-type] - def test_render_invalid_file_format() -> None: url = "https://example.com/data.unknown" access_key_id = "test_access_key" From 45bafa7914e8ef9b3702a9c4fa5eda0a3dc9ef81 Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Tue, 19 Mar 2024 11:49:53 +0200 Subject: [PATCH 023/127] Fix poetry collision #1055 Signed-off-by: Marcel Coetzee --- poetry.lock | 8141 +++++++++++++++++++++++------------------------- pyproject.toml | 2 +- 2 files changed, 3859 insertions(+), 4284 deletions(-) diff --git a/poetry.lock b/poetry.lock index e0869736fc..12df185d36 100644 --- a/poetry.lock +++ b/poetry.lock @@ -4,17 +4,23 @@ name = "about-time" version = "4.2.1" description = "Easily measure timing and throughput of code blocks, with beautiful human friendly representations." 
-category = "dev" optional = false python-versions = ">=3.7, <4" +files = [ + {file = "about-time-4.2.1.tar.gz", hash = "sha256:6a538862d33ce67d997429d14998310e1dbfda6cb7d9bbfbf799c4709847fece"}, + {file = "about_time-4.2.1-py3-none-any.whl", hash = "sha256:8bbf4c75fe13cbd3d72f49a03b02c5c7dca32169b6d49117c257e7eb3eaee341"}, +] [[package]] name = "adlfs" version = "2023.8.0" description = "Access Azure Datalake Gen1 with fsspec and dask" -category = "main" optional = true python-versions = ">=3.8" +files = [ + {file = "adlfs-2023.8.0-py3-none-any.whl", hash = "sha256:3eb248a3c2a30b419f1147bd7676d156b5219f96ef7f11d47166afd2a3bdb07e"}, + {file = "adlfs-2023.8.0.tar.gz", hash = "sha256:07e804f6df4593acfcaf01025b162e30ac13e523d3570279c98b2d91a18026d9"}, +] [package.dependencies] aiohttp = ">=3.7.0" @@ -31,9 +37,12 @@ docs = ["furo", "myst-parser", "numpydoc", "sphinx"] name = "agate" version = "1.7.1" description = "A data analysis library that is optimized for humans instead of machines." -category = "main" optional = false python-versions = "*" +files = [ + {file = "agate-1.7.1-py2.py3-none-any.whl", hash = "sha256:23f9f412f74f97b72f82b1525ab235cc816bc8c8525d968a091576a0dbc54a5f"}, + {file = "agate-1.7.1.tar.gz", hash = "sha256:eadf46d980168b8922d5d396d6258eecd5e7dbef7e6f0c0b71e968545ea96389"}, +] [package.dependencies] Babel = ">=2.0" @@ -51,9 +60,12 @@ test = ["PyICU (>=2.4.2)", "coverage (>=3.7.1)", "cssselect (>=0.9.1)", "lxml (> name = "aiobotocore" version = "2.11.2" description = "Async client for aws services using botocore and aiohttp" -category = "main" optional = true python-versions = ">=3.8" +files = [ + {file = "aiobotocore-2.11.2-py3-none-any.whl", hash = "sha256:487fede588040bfa3a43df945275c28c1c73ca75bf705295adb9fbadd2e89be7"}, + {file = "aiobotocore-2.11.2.tar.gz", hash = "sha256:6dd7352248e3523019c5a54a395d2b1c31080697fc80a9ad2672de4eec8c7abd"}, +] [package.dependencies] aiohttp = ">=3.7.4.post0,<4.0.0" @@ -69,9 +81,97 @@ boto3 = ["boto3 (>=1.33.2,<1.34.35)"] name = "aiohttp" version = "3.8.5" description = "Async http client/server framework (asyncio)" -category = "main" optional = false python-versions = ">=3.6" +files = [ + {file = "aiohttp-3.8.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a94159871304770da4dd371f4291b20cac04e8c94f11bdea1c3478e557fbe0d8"}, + {file = "aiohttp-3.8.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:13bf85afc99ce6f9ee3567b04501f18f9f8dbbb2ea11ed1a2e079670403a7c84"}, + {file = "aiohttp-3.8.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2ce2ac5708501afc4847221a521f7e4b245abf5178cf5ddae9d5b3856ddb2f3a"}, + {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96943e5dcc37a6529d18766597c491798b7eb7a61d48878611298afc1fca946c"}, + {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ad5c3c4590bb3cc28b4382f031f3783f25ec223557124c68754a2231d989e2b"}, + {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0c413c633d0512df4dc7fd2373ec06cc6a815b7b6d6c2f208ada7e9e93a5061d"}, + {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df72ac063b97837a80d80dec8d54c241af059cc9bb42c4de68bd5b61ceb37caa"}, + {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c48c5c0271149cfe467c0ff8eb941279fd6e3f65c9a388c984e0e6cf57538e14"}, + {file = 
"aiohttp-3.8.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:368a42363c4d70ab52c2c6420a57f190ed3dfaca6a1b19afda8165ee16416a82"}, + {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7607ec3ce4993464368505888af5beb446845a014bc676d349efec0e05085905"}, + {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0d21c684808288a98914e5aaf2a7c6a3179d4df11d249799c32d1808e79503b5"}, + {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:312fcfbacc7880a8da0ae8b6abc6cc7d752e9caa0051a53d217a650b25e9a691"}, + {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ad093e823df03bb3fd37e7dec9d4670c34f9e24aeace76808fc20a507cace825"}, + {file = "aiohttp-3.8.5-cp310-cp310-win32.whl", hash = "sha256:33279701c04351a2914e1100b62b2a7fdb9a25995c4a104259f9a5ead7ed4802"}, + {file = "aiohttp-3.8.5-cp310-cp310-win_amd64.whl", hash = "sha256:6e4a280e4b975a2e7745573e3fc9c9ba0d1194a3738ce1cbaa80626cc9b4f4df"}, + {file = "aiohttp-3.8.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ae871a964e1987a943d83d6709d20ec6103ca1eaf52f7e0d36ee1b5bebb8b9b9"}, + {file = "aiohttp-3.8.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:461908b2578955045efde733719d62f2b649c404189a09a632d245b445c9c975"}, + {file = "aiohttp-3.8.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:72a860c215e26192379f57cae5ab12b168b75db8271f111019509a1196dfc780"}, + {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc14be025665dba6202b6a71cfcdb53210cc498e50068bc088076624471f8bb9"}, + {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8af740fc2711ad85f1a5c034a435782fbd5b5f8314c9a3ef071424a8158d7f6b"}, + {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:841cd8233cbd2111a0ef0a522ce016357c5e3aff8a8ce92bcfa14cef890d698f"}, + {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ed1c46fb119f1b59304b5ec89f834f07124cd23ae5b74288e364477641060ff"}, + {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84f8ae3e09a34f35c18fa57f015cc394bd1389bce02503fb30c394d04ee6b938"}, + {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62360cb771707cb70a6fd114b9871d20d7dd2163a0feafe43fd115cfe4fe845e"}, + {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:23fb25a9f0a1ca1f24c0a371523546366bb642397c94ab45ad3aedf2941cec6a"}, + {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:b0ba0d15164eae3d878260d4c4df859bbdc6466e9e6689c344a13334f988bb53"}, + {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5d20003b635fc6ae3f96d7260281dfaf1894fc3aa24d1888a9b2628e97c241e5"}, + {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0175d745d9e85c40dcc51c8f88c74bfbaef9e7afeeeb9d03c37977270303064c"}, + {file = "aiohttp-3.8.5-cp311-cp311-win32.whl", hash = "sha256:2e1b1e51b0774408f091d268648e3d57f7260c1682e7d3a63cb00d22d71bb945"}, + {file = "aiohttp-3.8.5-cp311-cp311-win_amd64.whl", hash = "sha256:043d2299f6dfdc92f0ac5e995dfc56668e1587cea7f9aa9d8a78a1b6554e5755"}, + {file = "aiohttp-3.8.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:cae533195e8122584ec87531d6df000ad07737eaa3c81209e85c928854d2195c"}, + {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:4f21e83f355643c345177a5d1d8079f9f28b5133bcd154193b799d380331d5d3"}, + {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a7a75ef35f2df54ad55dbf4b73fe1da96f370e51b10c91f08b19603c64004acc"}, + {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2e2e9839e14dd5308ee773c97115f1e0a1cb1d75cbeeee9f33824fa5144c7634"}, + {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44e65da1de4403d0576473e2344828ef9c4c6244d65cf4b75549bb46d40b8dd"}, + {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:78d847e4cde6ecc19125ccbc9bfac4a7ab37c234dd88fbb3c5c524e8e14da543"}, + {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:c7a815258e5895d8900aec4454f38dca9aed71085f227537208057853f9d13f2"}, + {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:8b929b9bd7cd7c3939f8bcfffa92fae7480bd1aa425279d51a89327d600c704d"}, + {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:5db3a5b833764280ed7618393832e0853e40f3d3e9aa128ac0ba0f8278d08649"}, + {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:a0215ce6041d501f3155dc219712bc41252d0ab76474615b9700d63d4d9292af"}, + {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:fd1ed388ea7fbed22c4968dd64bab0198de60750a25fe8c0c9d4bef5abe13824"}, + {file = "aiohttp-3.8.5-cp36-cp36m-win32.whl", hash = "sha256:6e6783bcc45f397fdebc118d772103d751b54cddf5b60fbcc958382d7dd64f3e"}, + {file = "aiohttp-3.8.5-cp36-cp36m-win_amd64.whl", hash = "sha256:b5411d82cddd212644cf9360879eb5080f0d5f7d809d03262c50dad02f01421a"}, + {file = "aiohttp-3.8.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:01d4c0c874aa4ddfb8098e85d10b5e875a70adc63db91f1ae65a4b04d3344cda"}, + {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5980a746d547a6ba173fd5ee85ce9077e72d118758db05d229044b469d9029a"}, + {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2a482e6da906d5e6e653be079b29bc173a48e381600161c9932d89dfae5942ef"}, + {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80bd372b8d0715c66c974cf57fe363621a02f359f1ec81cba97366948c7fc873"}, + {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1161b345c0a444ebcf46bf0a740ba5dcf50612fd3d0528883fdc0eff578006a"}, + {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd56db019015b6acfaaf92e1ac40eb8434847d9bf88b4be4efe5bfd260aee692"}, + {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:153c2549f6c004d2754cc60603d4668899c9895b8a89397444a9c4efa282aaf4"}, + {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4a01951fabc4ce26ab791da5f3f24dca6d9a6f24121746eb19756416ff2d881b"}, + {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bfb9162dcf01f615462b995a516ba03e769de0789de1cadc0f916265c257e5d8"}, + {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:7dde0009408969a43b04c16cbbe252c4f5ef4574ac226bc8815cd7342d2028b6"}, + {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4149d34c32f9638f38f544b3977a4c24052042affa895352d3636fa8bffd030a"}, + {file = 
"aiohttp-3.8.5-cp37-cp37m-win32.whl", hash = "sha256:68c5a82c8779bdfc6367c967a4a1b2aa52cd3595388bf5961a62158ee8a59e22"}, + {file = "aiohttp-3.8.5-cp37-cp37m-win_amd64.whl", hash = "sha256:2cf57fb50be5f52bda004b8893e63b48530ed9f0d6c96c84620dc92fe3cd9b9d"}, + {file = "aiohttp-3.8.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:eca4bf3734c541dc4f374ad6010a68ff6c6748f00451707f39857f429ca36ced"}, + {file = "aiohttp-3.8.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1274477e4c71ce8cfe6c1ec2f806d57c015ebf84d83373676036e256bc55d690"}, + {file = "aiohttp-3.8.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:28c543e54710d6158fc6f439296c7865b29e0b616629767e685a7185fab4a6b9"}, + {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:910bec0c49637d213f5d9877105d26e0c4a4de2f8b1b29405ff37e9fc0ad52b8"}, + {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5443910d662db951b2e58eb70b0fbe6b6e2ae613477129a5805d0b66c54b6cb7"}, + {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2e460be6978fc24e3df83193dc0cc4de46c9909ed92dd47d349a452ef49325b7"}, + {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb1558def481d84f03b45888473fc5a1f35747b5f334ef4e7a571bc0dfcb11f8"}, + {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:34dd0c107799dcbbf7d48b53be761a013c0adf5571bf50c4ecad5643fe9cfcd0"}, + {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aa1990247f02a54185dc0dff92a6904521172a22664c863a03ff64c42f9b5410"}, + {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0e584a10f204a617d71d359fe383406305a4b595b333721fa50b867b4a0a1548"}, + {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:a3cf433f127efa43fee6b90ea4c6edf6c4a17109d1d037d1a52abec84d8f2e42"}, + {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:c11f5b099adafb18e65c2c997d57108b5bbeaa9eeee64a84302c0978b1ec948b"}, + {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:84de26ddf621d7ac4c975dbea4c945860e08cccde492269db4e1538a6a6f3c35"}, + {file = "aiohttp-3.8.5-cp38-cp38-win32.whl", hash = "sha256:ab88bafedc57dd0aab55fa728ea10c1911f7e4d8b43e1d838a1739f33712921c"}, + {file = "aiohttp-3.8.5-cp38-cp38-win_amd64.whl", hash = "sha256:5798a9aad1879f626589f3df0f8b79b3608a92e9beab10e5fda02c8a2c60db2e"}, + {file = "aiohttp-3.8.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a6ce61195c6a19c785df04e71a4537e29eaa2c50fe745b732aa937c0c77169f3"}, + {file = "aiohttp-3.8.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:773dd01706d4db536335fcfae6ea2440a70ceb03dd3e7378f3e815b03c97ab51"}, + {file = "aiohttp-3.8.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f83a552443a526ea38d064588613aca983d0ee0038801bc93c0c916428310c28"}, + {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f7372f7341fcc16f57b2caded43e81ddd18df53320b6f9f042acad41f8e049a"}, + {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea353162f249c8097ea63c2169dd1aa55de1e8fecbe63412a9bc50816e87b761"}, + {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5d47ae48db0b2dcf70bc8a3bc72b3de86e2a590fc299fdbbb15af320d2659de"}, + {file = 
"aiohttp-3.8.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d827176898a2b0b09694fbd1088c7a31836d1a505c243811c87ae53a3f6273c1"}, + {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3562b06567c06439d8b447037bb655ef69786c590b1de86c7ab81efe1c9c15d8"}, + {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4e874cbf8caf8959d2adf572a78bba17cb0e9d7e51bb83d86a3697b686a0ab4d"}, + {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6809a00deaf3810e38c628e9a33271892f815b853605a936e2e9e5129762356c"}, + {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:33776e945d89b29251b33a7e7d006ce86447b2cfd66db5e5ded4e5cd0340585c"}, + {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:eaeed7abfb5d64c539e2db173f63631455f1196c37d9d8d873fc316470dfbacd"}, + {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e91d635961bec2d8f19dfeb41a539eb94bd073f075ca6dae6c8dc0ee89ad6f91"}, + {file = "aiohttp-3.8.5-cp39-cp39-win32.whl", hash = "sha256:00ad4b6f185ec67f3e6562e8a1d2b69660be43070bd0ef6fcec5211154c7df67"}, + {file = "aiohttp-3.8.5-cp39-cp39-win_amd64.whl", hash = "sha256:c0a9034379a37ae42dea7ac1e048352d96286626251862e448933c0f59cbd79c"}, + {file = "aiohttp-3.8.5.tar.gz", hash = "sha256:b9552ec52cc147dbf1944ac7ac98af7602e51ea2dcd076ed194ca3c0d1c7d0bc"}, +] [package.dependencies] aiosignal = ">=1.1.2" @@ -89,9 +189,12 @@ speedups = ["Brotli", "aiodns", "cchardet"] name = "aioitertools" version = "0.11.0" description = "itertools and builtins for AsyncIO and mixed iterables" -category = "main" optional = true python-versions = ">=3.6" +files = [ + {file = "aioitertools-0.11.0-py3-none-any.whl", hash = "sha256:04b95e3dab25b449def24d7df809411c10e62aab0cbe31a50ca4e68748c43394"}, + {file = "aioitertools-0.11.0.tar.gz", hash = "sha256:42c68b8dd3a69c2bf7f2233bf7df4bb58b557bca5252ac02ed5187bbc67d6831"}, +] [package.dependencies] typing_extensions = {version = ">=4.0", markers = "python_version < \"3.10\""} @@ -100,9 +203,12 @@ typing_extensions = {version = ">=4.0", markers = "python_version < \"3.10\""} name = "aiosignal" version = "1.3.1" description = "aiosignal: a list of registered asynchronous callbacks" -category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, + {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, +] [package.dependencies] frozenlist = ">=1.1.0" @@ -111,9 +217,12 @@ frozenlist = ">=1.1.0" name = "alembic" version = "1.12.0" description = "A database migration tool for SQLAlchemy." -category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "alembic-1.12.0-py3-none-any.whl", hash = "sha256:03226222f1cf943deee6c85d9464261a6c710cd19b4fe867a3ad1f25afda610f"}, + {file = "alembic-1.12.0.tar.gz", hash = "sha256:8e7645c32e4f200675e69f0745415335eb59a3663f5feb487abfa0b30c45888b"}, +] [package.dependencies] importlib-metadata = {version = "*", markers = "python_version < \"3.9\""} @@ -129,9 +238,12 @@ tz = ["python-dateutil"] name = "alive-progress" version = "3.1.4" description = "A new kind of Progress Bar, with real-time throughput, ETA, and very cool animations!" 
-category = "dev" optional = false python-versions = ">=3.7, <4" +files = [ + {file = "alive-progress-3.1.4.tar.gz", hash = "sha256:74a95d8d0d42bc99d3a3725dbd06ebb852245f1b64e301a7c375b92b22663f7b"}, + {file = "alive_progress-3.1.4-py3-none-any.whl", hash = "sha256:c80ad87ce9c1054b01135a87fae69ecebbfc2107497ae87cbe6aec7e534903db"}, +] [package.dependencies] about-time = "4.2.1" @@ -141,9 +253,12 @@ grapheme = "0.6.0" name = "annotated-types" version = "0.6.0" description = "Reusable constraint types to use with typing.Annotated" -category = "main" optional = false python-versions = ">=3.8" +files = [ + {file = "annotated_types-0.6.0-py3-none-any.whl", hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43"}, + {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"}, +] [package.dependencies] typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.9\""} @@ -152,17 +267,23 @@ typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.9\""} name = "ansicon" version = "1.89.0" description = "Python wrapper for loading Jason Hood's ANSICON" -category = "dev" optional = false python-versions = "*" +files = [ + {file = "ansicon-1.89.0-py2.py3-none-any.whl", hash = "sha256:f1def52d17f65c2c9682cf8370c03f541f410c1752d6a14029f97318e4b9dfec"}, + {file = "ansicon-1.89.0.tar.gz", hash = "sha256:e4d039def5768a47e4afec8e89e83ec3ae5a26bf00ad851f914d1240b444d2b1"}, +] [[package]] name = "anyio" version = "4.0.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" -category = "main" optional = false python-versions = ">=3.8" +files = [ + {file = "anyio-4.0.0-py3-none-any.whl", hash = "sha256:cfdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f"}, + {file = "anyio-4.0.0.tar.gz", hash = "sha256:f7ed51751b2c2add651e5747c891b47e26d2a21be5d32d9311dfe9692f3e5d7a"}, +] [package.dependencies] exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} @@ -178,9 +299,12 @@ trio = ["trio (>=0.22)"] name = "apache-airflow" version = "2.8.1" description = "Programmatically author, schedule and monitor data pipelines" -category = "dev" optional = false python-versions = "<3.12,~=3.8" +files = [ + {file = "apache_airflow-2.8.1-py3-none-any.whl", hash = "sha256:8178b3fd22a8766beb2e2972352f37402994a2ea4356106a6763e05807efaa88"}, + {file = "apache_airflow-2.8.1.tar.gz", hash = "sha256:7443d82b790886c5ec137a8fdb94d672e33e81336713ca7320b4a1bbad443a9c"}, +] [package.dependencies] alembic = ">=1.6.3,<2.0" @@ -384,9 +508,12 @@ zendesk = ["apache-airflow-providers-zendesk"] name = "apache-airflow-providers-common-io" version = "1.3.0" description = "Provider package apache-airflow-providers-common-io for Apache Airflow" -category = "dev" optional = false python-versions = "~=3.8" +files = [ + {file = "apache_airflow_providers_common_io-1.3.0-py3-none-any.whl", hash = "sha256:a67c6dd3cb419c68fc1a9ed62f0f434426852e15a46c3159f367b3961332955d"}, + {file = "apache_airflow_providers_common_io-1.3.0.tar.gz", hash = "sha256:7172620a2370031970df2212a9f694a5ff82240f7e498b8b7dfdbae7e6c882d6"}, +] [package.dependencies] apache-airflow = ">=2.8.0" @@ -398,9 +525,12 @@ openlineage = ["apache-airflow-providers-openlineage"] name = "apache-airflow-providers-common-sql" version = "1.7.1" description = "Provider for Apache Airflow. 
Implements apache-airflow-providers-common-sql package" -category = "dev" optional = false python-versions = "~=3.8" +files = [ + {file = "apache-airflow-providers-common-sql-1.7.1.tar.gz", hash = "sha256:ba37f795d9656a87cf4661edc381b8ecfe930272c59324b59f8a158fd0971aeb"}, + {file = "apache_airflow_providers_common_sql-1.7.1-py3-none-any.whl", hash = "sha256:36da2f51b51a64765b0ed5e6a5fece8eaa3ca173dfbff803e2fe2a0afbb90944"}, +] [package.dependencies] apache-airflow = ">=2.4.0" @@ -414,9 +544,12 @@ pandas = ["pandas (>=0.17.1)"] name = "apache-airflow-providers-ftp" version = "3.5.1" description = "Provider for Apache Airflow. Implements apache-airflow-providers-ftp package" -category = "dev" optional = false python-versions = "~=3.8" +files = [ + {file = "apache-airflow-providers-ftp-3.5.1.tar.gz", hash = "sha256:dc6dc524dc7454857a0812154d7540172e36db3a87e48a4a91918ebf80898bbf"}, + {file = "apache_airflow_providers_ftp-3.5.1-py3-none-any.whl", hash = "sha256:e4ea77d6276355acfe2392c12155db7b9d51be460b7673b616dc1d8bee03c1d7"}, +] [package.dependencies] apache-airflow = ">=2.4.0" @@ -428,9 +561,12 @@ openlineage = ["apache-airflow-providers-openlineage"] name = "apache-airflow-providers-http" version = "4.5.1" description = "Provider for Apache Airflow. Implements apache-airflow-providers-http package" -category = "dev" optional = false python-versions = "~=3.8" +files = [ + {file = "apache-airflow-providers-http-4.5.1.tar.gz", hash = "sha256:ec90920ff980fc264af9811dc72c37ef272bcdb3d007c7114e12366559426460"}, + {file = "apache_airflow_providers_http-4.5.1-py3-none-any.whl", hash = "sha256:702f26938bc22684eefecd297c2b0809793f9e43b8d911d807a29f21e69da179"}, +] [package.dependencies] aiohttp = "*" @@ -443,9 +579,12 @@ requests-toolbelt = "*" name = "apache-airflow-providers-imap" version = "3.3.1" description = "Provider for Apache Airflow. Implements apache-airflow-providers-imap package" -category = "dev" optional = false python-versions = "~=3.8" +files = [ + {file = "apache-airflow-providers-imap-3.3.1.tar.gz", hash = "sha256:40bac2a75e4dfbcd7d397776d90d03938facaf2707acc6cc119a8db684e53f77"}, + {file = "apache_airflow_providers_imap-3.3.1-py3-none-any.whl", hash = "sha256:adb6ef7864a5a8e245fbbd555bb4ef1eecf5b094d6d23ca0edc5f0aded50490d"}, +] [package.dependencies] apache-airflow = ">=2.4.0" @@ -454,9 +593,12 @@ apache-airflow = ">=2.4.0" name = "apache-airflow-providers-sqlite" version = "3.4.3" description = "Provider for Apache Airflow. Implements apache-airflow-providers-sqlite package" -category = "dev" optional = false python-versions = "~=3.8" +files = [ + {file = "apache-airflow-providers-sqlite-3.4.3.tar.gz", hash = "sha256:347d2db03eaa5ea9fef414666565ffa5e849935cbc30e37237edcaa822b5ced8"}, + {file = "apache_airflow_providers_sqlite-3.4.3-py3-none-any.whl", hash = "sha256:4ffa6a50f0ea1b4e51240b657dfec3fb026c87bdfa71af908a56461df6a6f2e0"}, +] [package.dependencies] apache-airflow = ">=2.4.0" @@ -469,9 +611,12 @@ common-sql = ["apache-airflow-providers-common-sql"] name = "apispec" version = "6.3.0" description = "A pluggable API specification generator. Currently supports the OpenAPI Specification (f.k.a. the Swagger specification)." 
-category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "apispec-6.3.0-py3-none-any.whl", hash = "sha256:95a0b9355785df998bb0e9b939237a30ee4c7428fd6ef97305eae3da06b9b339"}, + {file = "apispec-6.3.0.tar.gz", hash = "sha256:6cb08d92ce73ff0b3bf46cb2ea5c00d57289b0f279fb0256a3df468182ba5344"}, +] [package.dependencies] packaging = ">=21.3" @@ -490,17 +635,23 @@ yaml = ["PyYAML (>=3.10)"] name = "appdirs" version = "1.4.4" description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." -category = "dev" optional = false python-versions = "*" +files = [ + {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, + {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, +] [[package]] name = "argcomplete" version = "3.1.1" description = "Bash tab completion for argparse" -category = "dev" optional = false python-versions = ">=3.6" +files = [ + {file = "argcomplete-3.1.1-py3-none-any.whl", hash = "sha256:35fa893a88deea85ea7b20d241100e64516d6af6d7b0ae2bed1d263d26f70948"}, + {file = "argcomplete-3.1.1.tar.gz", hash = "sha256:6c4c563f14f01440aaffa3eae13441c5db2357b5eec639abe7c0b15334627dff"}, +] [package.extras] test = ["coverage", "mypy", "pexpect", "ruff", "wheel"] @@ -509,9 +660,12 @@ test = ["coverage", "mypy", "pexpect", "ruff", "wheel"] name = "asgiref" version = "3.7.2" description = "ASGI specs, helper code, and adapters" -category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "asgiref-3.7.2-py3-none-any.whl", hash = "sha256:89b2ef2247e3b562a16eef663bc0e2e703ec6468e2fa8a5cd61cd449786d4f6e"}, + {file = "asgiref-3.7.2.tar.gz", hash = "sha256:9e0ce3aa93a819ba5b45120216b23878cf6e8525eb3848653452b4192b92afed"}, +] [package.dependencies] typing-extensions = {version = ">=4", markers = "python_version < \"3.11\""} @@ -523,17 +677,23 @@ tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] name = "asn1crypto" version = "1.5.1" description = "Fast ASN.1 parser and serializer with definitions for private keys, public keys, certificates, CRL, OCSP, CMS, PKCS#3, PKCS#7, PKCS#8, PKCS#12, PKCS#5, X.509 and TSP" -category = "main" optional = true python-versions = "*" +files = [ + {file = "asn1crypto-1.5.1-py2.py3-none-any.whl", hash = "sha256:db4e40728b728508912cbb3d44f19ce188f218e9eba635821bb4b68564f8fd67"}, + {file = "asn1crypto-1.5.1.tar.gz", hash = "sha256:13ae38502be632115abf8a24cbe5f4da52e3b5231990aff31123c805306ccb9c"}, +] [[package]] name = "astatine" version = "0.3.3" description = "Some handy helper functions for Python's AST module." 
-category = "dev" optional = false python-versions = ">=3.6" +files = [ + {file = "astatine-0.3.3-py3-none-any.whl", hash = "sha256:6d8c914f01fbea252cb8f31563f2e766a9ab03c02b9bcc37d18f7d9138828401"}, + {file = "astatine-0.3.3.tar.gz", hash = "sha256:0c58a7844b5890ff16da07dbfeb187341d8324cb4378940f89d795cbebebce08"}, +] [package.dependencies] asttokens = ">=1.1" @@ -543,9 +703,12 @@ domdf-python-tools = ">=2.7.0" name = "asttokens" version = "2.3.0" description = "Annotate AST trees with source code positions" -category = "dev" optional = false python-versions = "*" +files = [ + {file = "asttokens-2.3.0-py2.py3-none-any.whl", hash = "sha256:bef1a51bc256d349e9f94e7e40e44b705ed1162f55294220dd561d24583d9877"}, + {file = "asttokens-2.3.0.tar.gz", hash = "sha256:2552a88626aaa7f0f299f871479fc755bd4e7c11e89078965e928fb7bb9a6afe"}, +] [package.dependencies] six = ">=1.12.0" @@ -557,9 +720,12 @@ test = ["astroid", "pytest"] name = "astunparse" version = "1.6.3" description = "An AST unparser for Python" -category = "main" optional = false python-versions = "*" +files = [ + {file = "astunparse-1.6.3-py2.py3-none-any.whl", hash = "sha256:c2652417f2c8b5bb325c885ae329bdf3f86424075c4fd1a128674bc6fba4b8e8"}, + {file = "astunparse-1.6.3.tar.gz", hash = "sha256:5ad93a8456f0d084c3456d059fd9a92cce667963232cbf763eac3bc5b7940872"}, +] [package.dependencies] six = ">=1.6.1,<2.0" @@ -569,17 +735,23 @@ wheel = ">=0.23.0,<1.0" name = "async-timeout" version = "4.0.3" description = "Timeout context manager for asyncio programs" -category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, + {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, +] [[package]] name = "attrs" version = "23.1.0" description = "Classes Without Boilerplate" -category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, + {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, +] [package.extras] cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] @@ -592,9 +764,12 @@ tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pyte name = "authlib" version = "1.2.1" description = "The ultimate Python library in building OAuth and OpenID Connect servers and clients." 
-category = "main" optional = true python-versions = "*" +files = [ + {file = "Authlib-1.2.1-py2.py3-none-any.whl", hash = "sha256:c88984ea00149a90e3537c964327da930779afa4564e354edfd98410bea01911"}, + {file = "Authlib-1.2.1.tar.gz", hash = "sha256:421f7c6b468d907ca2d9afede256f068f87e34d23dd221c07d13d4c234726afb"}, +] [package.dependencies] cryptography = ">=3.2" @@ -603,9 +778,12 @@ cryptography = ">=3.2" name = "azure-core" version = "1.29.3" description = "Microsoft Azure Core Library for Python" -category = "main" optional = true python-versions = ">=3.7" +files = [ + {file = "azure-core-1.29.3.tar.gz", hash = "sha256:c92700af982e71c8c73de9f4c20da8b3f03ce2c22d13066e4d416b4629c87903"}, + {file = "azure_core-1.29.3-py3-none-any.whl", hash = "sha256:f8b2910f92b66293d93bd00564924ad20ad48f4a1e150577cf18d1e7d4f9263c"}, +] [package.dependencies] requests = ">=2.18.4" @@ -619,9 +797,12 @@ aio = ["aiohttp (>=3.0)"] name = "azure-datalake-store" version = "0.0.53" description = "Azure Data Lake Store Filesystem Client Library for Python" -category = "main" optional = true python-versions = "*" +files = [ + {file = "azure-datalake-store-0.0.53.tar.gz", hash = "sha256:05b6de62ee3f2a0a6e6941e6933b792b800c3e7f6ffce2fc324bc19875757393"}, + {file = "azure_datalake_store-0.0.53-py2.py3-none-any.whl", hash = "sha256:a30c902a6e360aa47d7f69f086b426729784e71c536f330b691647a51dc42b2b"}, +] [package.dependencies] cffi = "*" @@ -632,9 +813,12 @@ requests = ">=2.20.0" name = "azure-identity" version = "1.14.0" description = "Microsoft Azure Identity Library for Python" -category = "main" optional = true python-versions = ">=3.7" +files = [ + {file = "azure-identity-1.14.0.zip", hash = "sha256:72441799f8c5c89bfe21026965e266672a7c5d050c2c65119ef899dd5362e2b1"}, + {file = "azure_identity-1.14.0-py3-none-any.whl", hash = "sha256:edabf0e010eb85760e1dd19424d5e8f97ba2c9caff73a16e7b30ccbdbcce369b"}, +] [package.dependencies] azure-core = ">=1.11.0,<2.0.0" @@ -646,9 +830,12 @@ msal-extensions = ">=0.3.0,<2.0.0" name = "azure-storage-blob" version = "12.17.0" description = "Microsoft Azure Blob Storage Client Library for Python" -category = "main" optional = true python-versions = ">=3.7" +files = [ + {file = "azure-storage-blob-12.17.0.zip", hash = "sha256:c14b785a17050b30fc326a315bdae6bc4a078855f4f94a4c303ad74a48dc8c63"}, + {file = "azure_storage_blob-12.17.0-py3-none-any.whl", hash = "sha256:0016e0c549a80282d7b4920c03f2f4ba35c53e6e3c7dbcd2a4a8c8eb3882c1e7"}, +] [package.dependencies] azure-core = ">=1.28.0,<2.0.0" @@ -663,9 +850,12 @@ aio = ["azure-core[aio] (>=1.28.0,<2.0.0)"] name = "babel" version = "2.12.1" description = "Internationalization utilities" -category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "Babel-2.12.1-py3-none-any.whl", hash = "sha256:b4246fb7677d3b98f501a39d43396d3cafdc8eadb045f4a31be01863f655c610"}, + {file = "Babel-2.12.1.tar.gz", hash = "sha256:cc2d99999cd01d44420ae725a21c9e3711b3aadc7976d6147f622d8581963455"}, +] [package.dependencies] pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""} @@ -674,17 +864,37 @@ pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""} name = "backoff" version = "2.2.1" description = "Function decoration for backoff and retry" -category = "dev" optional = false python-versions = ">=3.7,<4.0" +files = [ + {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, + {file = "backoff-2.2.1.tar.gz", hash = 
"sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, +] [[package]] name = "backports-zoneinfo" version = "0.2.1" description = "Backport of the standard library zoneinfo module" -category = "main" optional = false python-versions = ">=3.6" +files = [ + {file = "backports.zoneinfo-0.2.1-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:da6013fd84a690242c310d77ddb8441a559e9cb3d3d59ebac9aca1a57b2e18bc"}, + {file = "backports.zoneinfo-0.2.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:89a48c0d158a3cc3f654da4c2de1ceba85263fafb861b98b59040a5086259722"}, + {file = "backports.zoneinfo-0.2.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:1c5742112073a563c81f786e77514969acb58649bcdf6cdf0b4ed31a348d4546"}, + {file = "backports.zoneinfo-0.2.1-cp36-cp36m-win32.whl", hash = "sha256:e8236383a20872c0cdf5a62b554b27538db7fa1bbec52429d8d106effbaeca08"}, + {file = "backports.zoneinfo-0.2.1-cp36-cp36m-win_amd64.whl", hash = "sha256:8439c030a11780786a2002261569bdf362264f605dfa4d65090b64b05c9f79a7"}, + {file = "backports.zoneinfo-0.2.1-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:f04e857b59d9d1ccc39ce2da1021d196e47234873820cbeaad210724b1ee28ac"}, + {file = "backports.zoneinfo-0.2.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:17746bd546106fa389c51dbea67c8b7c8f0d14b5526a579ca6ccf5ed72c526cf"}, + {file = "backports.zoneinfo-0.2.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5c144945a7752ca544b4b78c8c41544cdfaf9786f25fe5ffb10e838e19a27570"}, + {file = "backports.zoneinfo-0.2.1-cp37-cp37m-win32.whl", hash = "sha256:e55b384612d93be96506932a786bbcde5a2db7a9e6a4bb4bffe8b733f5b9036b"}, + {file = "backports.zoneinfo-0.2.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a76b38c52400b762e48131494ba26be363491ac4f9a04c1b7e92483d169f6582"}, + {file = "backports.zoneinfo-0.2.1-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:8961c0f32cd0336fb8e8ead11a1f8cd99ec07145ec2931122faaac1c8f7fd987"}, + {file = "backports.zoneinfo-0.2.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:e81b76cace8eda1fca50e345242ba977f9be6ae3945af8d46326d776b4cf78d1"}, + {file = "backports.zoneinfo-0.2.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7b0a64cda4145548fed9efc10322770f929b944ce5cee6c0dfe0c87bf4c0c8c9"}, + {file = "backports.zoneinfo-0.2.1-cp38-cp38-win32.whl", hash = "sha256:1b13e654a55cd45672cb54ed12148cd33628f672548f373963b0bff67b217328"}, + {file = "backports.zoneinfo-0.2.1-cp38-cp38-win_amd64.whl", hash = "sha256:4a0f800587060bf8880f954dbef70de6c11bbe59c673c3d818921f042f9954a6"}, + {file = "backports.zoneinfo-0.2.1.tar.gz", hash = "sha256:fadbfe37f74051d024037f223b8e001611eac868b5c5b06144ef4d8b799862f2"}, +] [package.extras] tzdata = ["tzdata"] @@ -693,9 +903,12 @@ tzdata = ["tzdata"] name = "bandit" version = "1.7.5" description = "Security oriented static analyser for python code." 
-category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "bandit-1.7.5-py3-none-any.whl", hash = "sha256:75665181dc1e0096369112541a056c59d1c5f66f9bb74a8d686c3c362b83f549"}, + {file = "bandit-1.7.5.tar.gz", hash = "sha256:bdfc739baa03b880c2d15d0431b31c658ffc348e907fe197e54e0389dd59e11e"}, +] [package.dependencies] colorama = {version = ">=0.3.9", markers = "platform_system == \"Windows\""} @@ -713,9 +926,12 @@ yaml = ["PyYAML"] name = "beautifulsoup4" version = "4.12.2" description = "Screen-scraping library" -category = "main" optional = true python-versions = ">=3.6.0" +files = [ + {file = "beautifulsoup4-4.12.2-py3-none-any.whl", hash = "sha256:bd2520ca0d9d7d12694a53d44ac482d181b4ec1888909b035a3dbf40d0f57d4a"}, + {file = "beautifulsoup4-4.12.2.tar.gz", hash = "sha256:492bbc69dca35d12daac71c4db1bfff0c876c00ef4a2ffacce226d4638eb72da"}, +] [package.dependencies] soupsieve = ">1.2" @@ -728,10 +944,33 @@ lxml = ["lxml"] name = "black" version = "23.9.1" description = "The uncompromising code formatter." -category = "dev" optional = false python-versions = ">=3.8" - +files = [ + {file = "black-23.9.1-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:d6bc09188020c9ac2555a498949401ab35bb6bf76d4e0f8ee251694664df6301"}, + {file = "black-23.9.1-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:13ef033794029b85dfea8032c9d3b92b42b526f1ff4bf13b2182ce4e917f5100"}, + {file = "black-23.9.1-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:75a2dc41b183d4872d3a500d2b9c9016e67ed95738a3624f4751a0cb4818fe71"}, + {file = "black-23.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13a2e4a93bb8ca74a749b6974925c27219bb3df4d42fc45e948a5d9feb5122b7"}, + {file = "black-23.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:adc3e4442eef57f99b5590b245a328aad19c99552e0bdc7f0b04db6656debd80"}, + {file = "black-23.9.1-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:8431445bf62d2a914b541da7ab3e2b4f3bc052d2ccbf157ebad18ea126efb91f"}, + {file = "black-23.9.1-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:8fc1ddcf83f996247505db6b715294eba56ea9372e107fd54963c7553f2b6dfe"}, + {file = "black-23.9.1-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:7d30ec46de88091e4316b17ae58bbbfc12b2de05e069030f6b747dfc649ad186"}, + {file = "black-23.9.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:031e8c69f3d3b09e1aa471a926a1eeb0b9071f80b17689a655f7885ac9325a6f"}, + {file = "black-23.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:538efb451cd50f43aba394e9ec7ad55a37598faae3348d723b59ea8e91616300"}, + {file = "black-23.9.1-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:638619a559280de0c2aa4d76f504891c9860bb8fa214267358f0a20f27c12948"}, + {file = "black-23.9.1-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:a732b82747235e0542c03bf352c126052c0fbc458d8a239a94701175b17d4855"}, + {file = "black-23.9.1-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:cf3a4d00e4cdb6734b64bf23cd4341421e8953615cba6b3670453737a72ec204"}, + {file = "black-23.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf99f3de8b3273a8317681d8194ea222f10e0133a24a7548c73ce44ea1679377"}, + {file = "black-23.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:14f04c990259576acd093871e7e9b14918eb28f1866f91968ff5524293f9c573"}, + {file = "black-23.9.1-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:c619f063c2d68f19b2d7270f4cf3192cb81c9ec5bc5ba02df91471d0b88c4c5c"}, + {file = "black-23.9.1-cp39-cp39-macosx_10_16_universal2.whl", hash = 
"sha256:6a3b50e4b93f43b34a9d3ef00d9b6728b4a722c997c99ab09102fd5efdb88325"}, + {file = "black-23.9.1-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:c46767e8df1b7beefb0899c4a95fb43058fa8500b6db144f4ff3ca38eb2f6393"}, + {file = "black-23.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50254ebfa56aa46a9fdd5d651f9637485068a1adf42270148cd101cdf56e0ad9"}, + {file = "black-23.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:403397c033adbc45c2bd41747da1f7fc7eaa44efbee256b53842470d4ac5a70f"}, + {file = "black-23.9.1-py3-none-any.whl", hash = "sha256:6ccd59584cc834b6d127628713e4b6b968e5f79572da66284532525a042549f9"}, + {file = "black-23.9.1.tar.gz", hash = "sha256:24b6b3ff5c6d9ea08a8888f6977eae858e1f340d7260cf56d70a49823236b62d"}, +] + [package.dependencies] click = ">=8.0.0" mypy-extensions = ">=0.4.3" @@ -751,9 +990,12 @@ uvloop = ["uvloop (>=0.15.2)"] name = "blessed" version = "1.20.0" description = "Easy, practical library for making terminal apps, by providing an elegant, well-documented interface to Colors, Keyboard input, and screen Positioning capabilities." -category = "dev" optional = false python-versions = ">=2.7" +files = [ + {file = "blessed-1.20.0-py2.py3-none-any.whl", hash = "sha256:0c542922586a265e699188e52d5f5ac5ec0dd517e5a1041d90d2bbf23f906058"}, + {file = "blessed-1.20.0.tar.gz", hash = "sha256:2cdd67f8746e048f00df47a2880f4d6acbcdb399031b604e34ba8f71d5787680"}, +] [package.dependencies] jinxed = {version = ">=1.1.0", markers = "platform_system == \"Windows\""} @@ -764,17 +1006,23 @@ wcwidth = ">=0.1.4" name = "blinker" version = "1.6.2" description = "Fast, simple object-to-object and broadcast signaling" -category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "blinker-1.6.2-py3-none-any.whl", hash = "sha256:c3d739772abb7bc2860abf5f2ec284223d9ad5c76da018234f6f50d6f31ab1f0"}, + {file = "blinker-1.6.2.tar.gz", hash = "sha256:4afd3de66ef3a9f8067559fb7a1cbe555c17dcbe15971b05d1b625c3e7abe213"}, +] [[package]] name = "boto3" version = "1.34.34" description = "The AWS SDK for Python" -category = "main" optional = true python-versions = ">= 3.8" +files = [ + {file = "boto3-1.34.34-py3-none-any.whl", hash = "sha256:33a8b6d9136fa7427160edb92d2e50f2035f04e9d63a2d1027349053e12626aa"}, + {file = "boto3-1.34.34.tar.gz", hash = "sha256:b2f321e20966f021ec800b7f2c01287a3dd04fc5965acdfbaa9c505a24ca45d1"}, +] [package.dependencies] botocore = ">=1.34.34,<1.35.0" @@ -788,9 +1036,12 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] name = "boto3-stubs" version = "1.28.40" description = "Type annotations for boto3 1.28.40 generated with mypy-boto3-builder 7.18.2" -category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "boto3-stubs-1.28.40.tar.gz", hash = "sha256:76079a82f199087319762c931f13506e02129132e80257dab0888d3da7dc11c7"}, + {file = "boto3_stubs-1.28.40-py3-none-any.whl", hash = "sha256:bd1d1cbdcbf18902a090d4a746cdecef2a7ebe31cf9a474bbe407d57eaa79a6a"}, +] [package.dependencies] botocore-stubs = "*" @@ -1165,9 +1416,12 @@ xray = ["mypy-boto3-xray (>=1.28.0,<1.29.0)"] name = "botocore" version = "1.34.34" description = "Low-level, data-driven core of boto 3." 
-category = "main" optional = true python-versions = ">= 3.8" +files = [ + {file = "botocore-1.34.34-py3-none-any.whl", hash = "sha256:cd060b0d88ebb2b893f1411c1db7f2ba66cc18e52dcc57ad029564ef5fec437b"}, + {file = "botocore-1.34.34.tar.gz", hash = "sha256:54093dc97372bb7683f5c61a279aa8240408abf3b2cc494ae82a9a90c1b784b5"}, +] [package.dependencies] jmespath = ">=0.7.1,<2.0.0" @@ -1184,9 +1438,12 @@ crt = ["awscrt (==0.19.19)"] name = "botocore-stubs" version = "1.31.40" description = "Type annotations and code completion for botocore" -category = "main" optional = false python-versions = ">=3.7,<4.0" +files = [ + {file = "botocore_stubs-1.31.40-py3-none-any.whl", hash = "sha256:aab534d7e7949cd543bc9b2fadc1a36712033cb00e6f31e2475eefe8486d19ae"}, + {file = "botocore_stubs-1.31.40.tar.gz", hash = "sha256:2001a253daf4ae2e171e6137b9982a00a7fbfc7a53449a16856dc049e7cd5214"}, +] [package.dependencies] types-awscrt = "*" @@ -1196,33 +1453,95 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.9\""} name = "cachelib" version = "0.9.0" description = "A collection of cache libraries in the same API interface." -category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "cachelib-0.9.0-py3-none-any.whl", hash = "sha256:811ceeb1209d2fe51cd2b62810bd1eccf70feba5c52641532498be5c675493b3"}, + {file = "cachelib-0.9.0.tar.gz", hash = "sha256:38222cc7c1b79a23606de5c2607f4925779e37cdcea1c2ad21b8bae94b5425a5"}, +] [[package]] name = "cachetools" version = "5.3.1" description = "Extensible memoizing collections and decorators" -category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "cachetools-5.3.1-py3-none-any.whl", hash = "sha256:95ef631eeaea14ba2e36f06437f36463aac3a096799e876ee55e5cdccb102590"}, + {file = "cachetools-5.3.1.tar.gz", hash = "sha256:dce83f2d9b4e1f732a8cd44af8e8fab2dbe46201467fc98b3ef8f269092bf62b"}, +] [[package]] name = "certifi" version = "2023.7.22" description = "Python package for providing Mozilla's CA Bundle." -category = "main" optional = false python-versions = ">=3.6" +files = [ + {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"}, + {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"}, +] [[package]] name = "cffi" version = "1.16.0" description = "Foreign Function Interface for Python calling C code." 
-category = "main" optional = false python-versions = ">=3.8" +files = [ + {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, + {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, + {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, + {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, + {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, + {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", 
hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, + {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, + {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, + {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, + {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, + {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, + {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, + {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, + {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, + {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, +] [package.dependencies] pycparser = "*" @@ -1231,25 +1550,107 @@ pycparser = "*" name = "chardet" version = "5.2.0" description = "Universal encoding detector for Python 3" -category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "chardet-5.2.0-py3-none-any.whl", hash = "sha256:e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970"}, + {file = "chardet-5.2.0.tar.gz", hash = "sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7"}, +] [[package]] name = "charset-normalizer" version = "3.2.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -category = "main" optional = false python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.2.0.tar.gz", hash = "sha256:3bb3d25a8e6c0aedd251753a79ae98a093c7e7b471faa3aa9a93a81431987ace"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b87549028f680ca955556e3bd57013ab47474c3124dc069faa0b6545b6c9710"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7c70087bfee18a42b4040bb9ec1ca15a08242cf5867c58726530bdf3945672ed"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a103b3a7069b62f5d4890ae1b8f0597618f628b286b03d4bc9195230b154bfa9"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94aea8eff76ee6d1cdacb07dd2123a68283cb5569e0250feab1240058f53b623"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db901e2ac34c931d73054d9797383d0f8009991e723dab15109740a63e7f902a"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0dac0ff919ba34d4df1b6131f59ce95b08b9065233446be7e459f95554c0dc8"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:193cbc708ea3aca45e7221ae58f0fd63f933753a9bfb498a3b474878f12caaad"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:baacc6aee0b2ef6f3d308e197b5d7a81c0e70b06beae1f1fcacffdbd124fe0e3"}, + {file = 
"charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bf420121d4c8dce6b889f0e8e4ec0ca34b7f40186203f06a946fa0276ba54029"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c04a46716adde8d927adb9457bbe39cf473e1e2c2f5d0a16ceb837e5d841ad4f"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:aaf63899c94de41fe3cf934601b0f7ccb6b428c6e4eeb80da72c58eab077b19a"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62e51710986674142526ab9f78663ca2b0726066ae26b78b22e0f5e571238dd"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-win32.whl", hash = "sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:48021783bdf96e3d6de03a6e39a1171ed5bd7e8bb93fc84cc649d11490f87cea"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4957669ef390f0e6719db3613ab3a7631e68424604a7b448f079bee145da6e09"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:46fb8c61d794b78ec7134a715a3e564aafc8f6b5e338417cb19fe9f57a5a9bf2"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f25c229a6ba38a35ae6e25ca1264621cc25d4d38dca2942a7fce0b67a4efe918"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2efb1bd13885392adfda4614c33d3b68dee4921fd0ac1d3988f8cbb7d589e72a"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f30b48dd7fa1474554b0b0f3fdfdd4c13b5c737a3c6284d3cdc424ec0ffff3a"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:246de67b99b6851627d945db38147d1b209a899311b1305dd84916f2b88526c6"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd9b3b31adcb054116447ea22caa61a285d92e94d710aa5ec97992ff5eb7cf3"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8c2f5e83493748286002f9369f3e6607c565a6a90425a3a1fef5ae32a36d749d"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3170c9399da12c9dc66366e9d14da8bf7147e1e9d9ea566067bbce7bb74bd9c2"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7a4826ad2bd6b07ca615c74ab91f32f6c96d08f6fcc3902ceeedaec8cdc3bcd6"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:3b1613dd5aee995ec6d4c69f00378bbd07614702a315a2cf6c1d21461fe17c23"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e608aafdb55eb9f255034709e20d5a83b6d60c054df0802fa9c9883d0a937aa"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-win32.whl", hash = "sha256:f2a1d0fd4242bd8643ce6f98927cf9c04540af6efa92323e9d3124f57727bfc1"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:681eb3d7e02e3c3655d1b16059fbfb605ac464c834a0c629048a30fad2b27489"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c57921cda3a80d0f2b8aec7e25c8aa14479ea92b5b51b6876d975d925a2ea346"}, + 
{file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41b25eaa7d15909cf3ac4c96088c1f266a9a93ec44f87f1d13d4a0e86c81b982"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f058f6963fd82eb143c692cecdc89e075fa0828db2e5b291070485390b2f1c9c"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7647ebdfb9682b7bb97e2a5e7cb6ae735b1c25008a70b906aecca294ee96cf4"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eef9df1eefada2c09a5e7a40991b9fc6ac6ef20b1372abd48d2794a316dc0449"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e03b8895a6990c9ab2cdcd0f2fe44088ca1c65ae592b8f795c3294af00a461c3"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ee4006268ed33370957f55bf2e6f4d263eaf4dc3cfc473d1d90baff6ed36ce4a"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c4983bf937209c57240cff65906b18bb35e64ae872da6a0db937d7b4af845dd7"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:3bb7fda7260735efe66d5107fb7e6af6a7c04c7fce9b2514e04b7a74b06bf5dd"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:72814c01533f51d68702802d74f77ea026b5ec52793c791e2da806a3844a46c3"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:70c610f6cbe4b9fce272c407dd9d07e33e6bf7b4aa1b7ffb6f6ded8e634e3592"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-win32.whl", hash = "sha256:a401b4598e5d3f4a9a811f3daf42ee2291790c7f9d74b18d75d6e21dda98a1a1"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:c0b21078a4b56965e2b12f247467b234734491897e99c1d51cee628da9786959"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:95eb302ff792e12aba9a8b8f8474ab229a83c103d74a750ec0bd1c1eea32e669"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a100c6d595a7f316f1b6f01d20815d916e75ff98c27a01ae817439ea7726329"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6339d047dab2780cc6220f46306628e04d9750f02f983ddb37439ca47ced7149"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b749b9cc6ee664a3300bb3a273c1ca8068c46be705b6c31cf5d276f8628a94"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a38856a971c602f98472050165cea2cdc97709240373041b69030be15047691f"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89f1b185a01fe560bc8ae5f619e924407efca2191b56ce749ec84982fc59a32a"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1c8a2f4c69e08e89632defbfabec2feb8a8d99edc9f89ce33c4b9e36ab63037"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2f4ac36d8e2b4cc1aa71df3dd84ff8efbe3bfb97ac41242fbcfc053c67434f46"}, + {file = 
"charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a386ebe437176aab38c041de1260cd3ea459c6ce5263594399880bbc398225b2"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ccd16eb18a849fd8dcb23e23380e2f0a354e8daa0c984b8a732d9cfaba3a776d"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e6a5bf2cba5ae1bb80b154ed68a3cfa2fa00fde979a7f50d6598d3e17d9ac20c"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45de3f87179c1823e6d9e32156fb14c1927fcc9aba21433f088fdfb555b77c10"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-win32.whl", hash = "sha256:1000fba1057b92a65daec275aec30586c3de2401ccdcd41f8a5c1e2c87078706"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b2c760cfc7042b27ebdb4a43a4453bd829a5742503599144d54a032c5dc7e9e"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:855eafa5d5a2034b4621c74925d89c5efef61418570e5ef9b37717d9c796419c"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:203f0c8871d5a7987be20c72442488a0b8cfd0f43b7973771640fc593f56321f"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e857a2232ba53ae940d3456f7533ce6ca98b81917d47adc3c7fd55dad8fab858"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e86d77b090dbddbe78867a0275cb4df08ea195e660f1f7f13435a4649e954e5"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fb39a81950ec280984b3a44f5bd12819953dc5fa3a7e6fa7a80db5ee853952"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dee8e57f052ef5353cf608e0b4c871aee320dd1b87d351c28764fc0ca55f9f4"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8700f06d0ce6f128de3ccdbc1acaea1ee264d2caa9ca05daaf492fde7c2a7200"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1920d4ff15ce893210c1f0c0e9d19bfbecb7983c76b33f046c13a8ffbd570252"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c1c76a1743432b4b60ab3358c937a3fe1341c828ae6194108a94c69028247f22"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f7560358a6811e52e9c4d142d497f1a6e10103d3a6881f18d04dbce3729c0e2c"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c8063cf17b19661471ecbdb3df1c84f24ad2e389e326ccaf89e3fb2484d8dd7e"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:cd6dbe0238f7743d0efe563ab46294f54f9bc8f4b9bcf57c3c666cc5bc9d1299"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1249cbbf3d3b04902ff081ffbb33ce3377fa6e4c7356f759f3cd076cc138d020"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-win32.whl", hash = "sha256:6c409c0deba34f147f77efaa67b8e4bb83d2f11c8806405f76397ae5b8c0d1c9"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:7095f6fbfaa55defb6b733cfeb14efaae7a29f0b59d8cf213be4e7ca0b857b80"}, + {file = "charset_normalizer-3.2.0-py3-none-any.whl", hash = "sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6"}, +] [[package]] name = "click" version = "8.1.7" description = 
"Composable command line interface toolkit" -category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} @@ -1258,9 +1659,12 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} name = "clickclick" version = "20.10.2" description = "Click utility functions" -category = "dev" optional = false python-versions = "*" +files = [ + {file = "clickclick-20.10.2-py2.py3-none-any.whl", hash = "sha256:c8f33e6d9ec83f68416dd2136a7950125bd256ec39ccc9a85c6e280a16be2bb5"}, + {file = "clickclick-20.10.2.tar.gz", hash = "sha256:4efb13e62353e34c5eef7ed6582c4920b418d7dedc86d819e22ee089ba01802c"}, +] [package.dependencies] click = ">=4.0" @@ -1392,17 +1796,23 @@ zstd = ["clickhouse-cityhash (>=1.0.2.1)", "zstd"] name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] [[package]] name = "coloredlogs" version = "15.0.1" description = "Colored terminal output for Python's logging module" -category = "main" optional = true python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "coloredlogs-15.0.1-py2.py3-none-any.whl", hash = "sha256:612ee75c546f53e92e70049c9dbfcc18c935a2b9a53b66085ce9ef6a6e5c0934"}, + {file = "coloredlogs-15.0.1.tar.gz", hash = "sha256:7c991aa71a4577af2f82600d8f8f3a89f936baeaf9b50a9c197da014e5bf16b0"}, +] [package.dependencies] humanfriendly = ">=9.1" @@ -1414,9 +1824,12 @@ cron = ["capturer (>=2.4)"] name = "colorlog" version = "4.8.0" description = "Log formatting with colors!" 
-category = "dev" optional = false python-versions = "*" +files = [ + {file = "colorlog-4.8.0-py2.py3-none-any.whl", hash = "sha256:3dd15cb27e8119a24c1a7b5c93f9f3b455855e0f73993b1c25921b2f646f1dcd"}, + {file = "colorlog-4.8.0.tar.gz", hash = "sha256:59b53160c60902c405cdec28d38356e09d40686659048893e026ecbd589516b1"}, +] [package.dependencies] colorama = {version = "*", markers = "sys_platform == \"win32\""} @@ -1425,9 +1838,12 @@ colorama = {version = "*", markers = "sys_platform == \"win32\""} name = "configupdater" version = "3.1.1" description = "Parser like ConfigParser but for updating configuration files" -category = "dev" optional = false python-versions = ">=3.6" +files = [ + {file = "ConfigUpdater-3.1.1-py2.py3-none-any.whl", hash = "sha256:805986dbeba317886c7a8d348b2e34986dc9e3128cd3761ecc35decbd372b286"}, + {file = "ConfigUpdater-3.1.1.tar.gz", hash = "sha256:46f0c74d73efa723776764b43c9739f68052495dd3d734319c1d0eb58511f15b"}, +] [package.extras] testing = ["flake8", "pytest", "pytest-cov", "pytest-virtualenv", "pytest-xdist", "sphinx"] @@ -1436,17 +1852,37 @@ testing = ["flake8", "pytest", "pytest-cov", "pytest-virtualenv", "pytest-xdist" name = "connectorx" version = "0.3.2" description = "" -category = "dev" optional = false python-versions = "*" +files = [ + {file = "connectorx-0.3.2-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:98274242c64a2831a8b1c86e0fa2c46a557dd8cbcf00c3adcf5a602455fb02d7"}, + {file = "connectorx-0.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e2b11ba49efd330a7348bef3ce09c98218eea21d92a12dd75cd8f0ade5c99ffc"}, + {file = "connectorx-0.3.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:3f6431a30304271f9137bd7854d2850231041f95164c6b749d9ede4c0d92d10c"}, + {file = "connectorx-0.3.2-cp310-none-win_amd64.whl", hash = "sha256:b370ebe8f44d2049254dd506f17c62322cc2db1b782a57f22cce01ddcdcc8fed"}, + {file = "connectorx-0.3.2-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:d5277fc936a80da3d1dcf889020e45da3493179070d9be8a47500c7001fab967"}, + {file = "connectorx-0.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8cc6c963237c3d3b02f7dcd47e1be9fc6e8b93ef0aeed8694f65c62b3c4688a1"}, + {file = "connectorx-0.3.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:9403902685b3423cba786db01a36f36efef90ae3d429e45b74dadb4ae9e328dc"}, + {file = "connectorx-0.3.2-cp311-none-win_amd64.whl", hash = "sha256:6b5f518194a2cf12d5ad031d488ded4e4678eff3b63551856f2a6f1a83197bb8"}, + {file = "connectorx-0.3.2-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:a5602ae0531e55c58af8cfca92b8e9454fc1ccd82c801cff8ee0f17c728b4988"}, + {file = "connectorx-0.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7c5959bfb4a049bb8ce1f590b5824cd1105460b6552ffec336c4bd740eebd5bd"}, + {file = "connectorx-0.3.2-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:c4387bb27ba3acde0ab6921fdafa3811e09fce0db3d1f1ede8547d9de3aab685"}, + {file = "connectorx-0.3.2-cp38-none-win_amd64.whl", hash = "sha256:4b1920c191be9a372629c31c92d5f71fc63f49f283e5adfc4111169de40427d9"}, + {file = "connectorx-0.3.2-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:4473fc06ac3618c673cea63a7050e721fe536782d5c1b6e433589c37a63de704"}, + {file = "connectorx-0.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4009b16399457340326137a223921a24e3e166b45db4dbf3ef637b9981914dc2"}, + {file = "connectorx-0.3.2-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:74f5b93535663cf47f9fc3d7964f93e652c07003fa71c38d7a68f42167f54bba"}, + {file = "connectorx-0.3.2-cp39-none-win_amd64.whl", hash = 
"sha256:0b80acca13326856c14ee726b47699011ab1baa10897180240c8783423ca5e8c"}, +] [[package]] name = "connexion" version = "2.14.1" description = "Connexion - API first applications with OpenAPI/Swagger and Flask" -category = "dev" optional = false python-versions = ">=3.6" +files = [ + {file = "connexion-2.14.1-py2.py3-none-any.whl", hash = "sha256:f343717241b4c4802a694c38fee66fb1693c897fe4ea5a957fa9b3b07caf6394"}, + {file = "connexion-2.14.1.tar.gz", hash = "sha256:99aa5781e70a7b94f8ffae8cf89f309d49cdb811bbd65a8e2f2546f3b19a01e6"}, +] [package.dependencies] clickclick = ">=1.2,<21" @@ -1470,9 +1906,11 @@ tests = ["MarkupSafe (>=0.23)", "aiohttp (>=2.3.10,<4)", "aiohttp-jinja2 (>=0.14 name = "cron-descriptor" version = "1.4.0" description = "A Python library that converts cron expressions into human readable strings." -category = "main" optional = false python-versions = "*" +files = [ + {file = "cron_descriptor-1.4.0.tar.gz", hash = "sha256:b6ff4e3a988d7ca04a4ab150248e9f166fb7a5c828a85090e75bcc25aa93b4dd"}, +] [package.extras] dev = ["polib"] @@ -1481,9 +1919,12 @@ dev = ["polib"] name = "croniter" version = "1.4.1" description = "croniter provides iteration for datetime object with cron like format" -category = "dev" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "croniter-1.4.1-py2.py3-none-any.whl", hash = "sha256:9595da48af37ea06ec3a9f899738f1b2c1c13da3c38cea606ef7cd03ea421128"}, + {file = "croniter-1.4.1.tar.gz", hash = "sha256:1a6df60eacec3b7a0aa52a8f2ef251ae3dd2a7c7c8b9874e73e791636d55a361"}, +] [package.dependencies] python-dateutil = "*" @@ -1492,16 +1933,40 @@ python-dateutil = "*" name = "cryptography" version = "41.0.7" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
-category = "main" optional = false python-versions = ">=3.7" - -[package.dependencies] -cffi = ">=1.12" - -[package.extras] -docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] -docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] +files = [ + {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:3c78451b78313fa81607fa1b3f1ae0a5ddd8014c38a02d9db0616133987b9cdf"}, + {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:928258ba5d6f8ae644e764d0f996d61a8777559f72dfeb2eea7e2fe0ad6e782d"}, + {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a1b41bc97f1ad230a41657d9155113c7521953869ae57ac39ac7f1bb471469a"}, + {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:841df4caa01008bad253bce2a6f7b47f86dc9f08df4b433c404def869f590a15"}, + {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5429ec739a29df2e29e15d082f1d9ad683701f0ec7709ca479b3ff2708dae65a"}, + {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:43f2552a2378b44869fe8827aa19e69512e3245a219104438692385b0ee119d1"}, + {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:af03b32695b24d85a75d40e1ba39ffe7db7ffcb099fe507b39fd41a565f1b157"}, + {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:49f0805fc0b2ac8d4882dd52f4a3b935b210935d500b6b805f321addc8177406"}, + {file = "cryptography-41.0.7-cp37-abi3-win32.whl", hash = "sha256:f983596065a18a2183e7f79ab3fd4c475205b839e02cbc0efbbf9666c4b3083d"}, + {file = "cryptography-41.0.7-cp37-abi3-win_amd64.whl", hash = "sha256:90452ba79b8788fa380dfb587cca692976ef4e757b194b093d845e8d99f612f2"}, + {file = "cryptography-41.0.7-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:079b85658ea2f59c4f43b70f8119a52414cdb7be34da5d019a77bf96d473b960"}, + {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:b640981bf64a3e978a56167594a0e97db71c89a479da8e175d8bb5be5178c003"}, + {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e3114da6d7f95d2dee7d3f4eec16dacff819740bbab931aff8648cb13c5ff5e7"}, + {file = "cryptography-41.0.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d5ec85080cce7b0513cfd233914eb8b7bbd0633f1d1703aa28d1dd5a72f678ec"}, + {file = "cryptography-41.0.7-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7a698cb1dac82c35fcf8fe3417a3aaba97de16a01ac914b89a0889d364d2f6be"}, + {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:37a138589b12069efb424220bf78eac59ca68b95696fc622b6ccc1c0a197204a"}, + {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:68a2dec79deebc5d26d617bfdf6e8aab065a4f34934b22d3b5010df3ba36612c"}, + {file = "cryptography-41.0.7-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:09616eeaef406f99046553b8a40fbf8b1e70795a91885ba4c96a70793de5504a"}, + {file = "cryptography-41.0.7-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48a0476626da912a44cc078f9893f292f0b3e4c739caf289268168d8f4702a39"}, + {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c7f3201ec47d5207841402594f1d7950879ef890c0c495052fa62f58283fde1a"}, + {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c5ca78485a255e03c32b513f8c2bc39fedb7f5c5f8535545bdc223a03b24f248"}, 
+ {file = "cryptography-41.0.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d6c391c021ab1f7a82da5d8d0b3cee2f4b2c455ec86c8aebbc84837a631ff309"}, + {file = "cryptography-41.0.7.tar.gz", hash = "sha256:13f93ce9bea8016c253b34afc6bd6a75993e5c40672ed5405a9c832f0d4a00bc"}, +] + +[package.dependencies] +cffi = ">=1.12" + +[package.extras] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] +docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] nox = ["nox"] pep8test = ["black", "check-sdist", "mypy", "ruff"] sdist = ["build"] @@ -1513,9 +1978,12 @@ test-randomorder = ["pytest-randomly"] name = "databind-core" version = "4.4.0" description = "Databind is a library inspired by jackson-databind to de-/serialize Python dataclasses. Compatible with Python 3.7 and newer." -category = "dev" optional = false python-versions = ">=3.6.3,<4.0.0" +files = [ + {file = "databind.core-4.4.0-py3-none-any.whl", hash = "sha256:3c8a4d9abc93e158af9931d8cec389ddfc0514e02aec03b397948d243db11881"}, + {file = "databind.core-4.4.0.tar.gz", hash = "sha256:715d485e934c073f819f0250bbfcaf59c1319f83427365bc7cfd4c347f87576d"}, +] [package.dependencies] Deprecated = ">=1.2.12,<2.0.0" @@ -1528,9 +1996,12 @@ typing-extensions = ">=3.10.0" name = "databind-json" version = "4.4.0" description = "De-/serialize Python dataclasses to or from JSON payloads. Compatible with Python 3.7 and newer." -category = "dev" optional = false python-versions = ">=3.6.3,<4.0.0" +files = [ + {file = "databind.json-4.4.0-py3-none-any.whl", hash = "sha256:df8874118cfba6fd0e77ec3d41a87e04e26034bd545230cab0db1fe904bf1b09"}, + {file = "databind.json-4.4.0.tar.gz", hash = "sha256:4356afdf0aeefcc053eda0888650c59cc558be2686f08a58324d675ccd023586"}, +] [package.dependencies] "databind.core" = ">=4.4.0,<5.0.0" @@ -1542,9 +2013,12 @@ typing-extensions = ">=3.10.0" name = "databricks-sdk" version = "0.17.0" description = "Databricks SDK for Python (Beta)" -category = "main" optional = true python-versions = ">=3.7" +files = [ + {file = "databricks-sdk-0.17.0.tar.gz", hash = "sha256:0a1baa6783aba9b034b9a017da8d0cf839ec61ae8318792b78bfb3db0374dd9c"}, + {file = "databricks_sdk-0.17.0-py3-none-any.whl", hash = "sha256:ad90e01c7b1a9d60a3de6a35606c79ac982e8972d3ad3ff89c251c24439c8bb9"}, +] [package.dependencies] google-auth = ">=2.0,<3.0" @@ -1558,9 +2032,12 @@ notebook = ["ipython (>=8,<9)", "ipywidgets (>=8,<9)"] name = "databricks-sql-connector" version = "2.9.3" description = "Databricks SQL Connector for Python" -category = "main" optional = true python-versions = ">=3.7.1,<4.0.0" +files = [ + {file = "databricks_sql_connector-2.9.3-py3-none-any.whl", hash = "sha256:e37b5aa8bea22e84a9920e87ad9ba6cafbe656008c180a790baa53b711dd9889"}, + {file = "databricks_sql_connector-2.9.3.tar.gz", hash = "sha256:09a1686de3470091e78640de276053d4e18f8c03ba3627ed45b368f78bf87db9"}, +] [package.dependencies] alembic = ">=1.0.11,<2.0.0" @@ -1585,9 +2062,12 @@ urllib3 = ">=1.0" name = "dbt-athena-community" version = "1.7.1" description = "The athena adapter plugin for dbt (data build tool)" -category = "main" optional = true python-versions = ">=3.8" +files = [ + {file = "dbt-athena-community-1.7.1.tar.gz", hash = "sha256:02c7bc461628e2adbfaf9d3f51fbe9a5cb5e06ee2ea8329259758518ceafdc12"}, + {file = "dbt_athena_community-1.7.1-py3-none-any.whl", hash = "sha256:2a376fa128e2bd98cb774fcbf718ebe4fbc9cac7857aa037b9e36bec75448361"}, +] [package.dependencies] boto3 = ">=1.26,<2.0" @@ -1602,9 +2082,12 @@ tenacity = ">=8.2,<9.0" name = 
"dbt-bigquery" version = "1.7.2" description = "The Bigquery adapter plugin for dbt" -category = "main" optional = true python-versions = ">=3.8" +files = [ + {file = "dbt-bigquery-1.7.2.tar.gz", hash = "sha256:27c7f492f65ab5d1d43432a4467a436fc3637e3cb72c5b4ab07ddf7573c43596"}, + {file = "dbt_bigquery-1.7.2-py3-none-any.whl", hash = "sha256:75015755363d9e8b8cebe190d59a5e08375032b37bcfec41ec8753e7dea29f6e"}, +] [package.dependencies] dbt-core = ">=1.7.0,<1.8.0" @@ -1617,9 +2100,12 @@ google-cloud-storage = ">=2.4,<3.0" name = "dbt-core" version = "1.7.4" description = "With dbt, data analysts and engineers can build analytics the way engineers build applications." -category = "main" optional = false python-versions = ">=3.8" +files = [ + {file = "dbt-core-1.7.4.tar.gz", hash = "sha256:769b95949210cb0d1eafdb7be48b01e59984650403f86510fdee65bd0f70f76d"}, + {file = "dbt_core-1.7.4-py3-none-any.whl", hash = "sha256:50050ae44fe9bad63e1b639810ed3629822cdc7a2af0eff6e08461c94c4527c0"}, +] [package.dependencies] agate = ">=1.7.0,<1.8.0" @@ -1650,9 +2136,12 @@ urllib3 = ">=1.0,<2.0" name = "dbt-databricks" version = "1.7.3" description = "The Databricks adapter plugin for dbt" -category = "main" optional = true python-versions = ">=3.8" +files = [ + {file = "dbt-databricks-1.7.3.tar.gz", hash = "sha256:045e26240c825342259a59004c2e35e7773b0b6cbb255e6896bd46d3810f9607"}, + {file = "dbt_databricks-1.7.3-py3-none-any.whl", hash = "sha256:7c2b7bd7228a401d8262781749fc496c825fe6050e661e5ab3f1c66343e311cc"}, +] [package.dependencies] databricks-sdk = ">=0.9.0" @@ -1664,9 +2153,12 @@ keyring = ">=23.13.0" name = "dbt-duckdb" version = "1.7.1" description = "The duckdb adapter plugin for dbt (data build tool)" -category = "main" optional = false python-versions = ">=3.8" +files = [ + {file = "dbt-duckdb-1.7.1.tar.gz", hash = "sha256:e59b3e58d7a461988d000892b75ce95245cdf899c847e3a430eb2e9e10e63bb9"}, + {file = "dbt_duckdb-1.7.1-py3-none-any.whl", hash = "sha256:bd75b1a72924b942794d0c3293a1159a01f21ab9d82c9f18b22c253dedad101a"}, +] [package.dependencies] dbt-core = ">=1.7.0,<1.8.0" @@ -1679,17 +2171,37 @@ glue = ["boto3", "mypy-boto3-glue"] name = "dbt-extractor" version = "0.5.1" description = "A tool to analyze and extract information from Jinja used in dbt projects." 
-category = "main" optional = false python-versions = ">=3.6.1" +files = [ + {file = "dbt_extractor-0.5.1-cp38-abi3-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:3b91e6106b967d908b34f83929d3f50ee2b498876a1be9c055fe060ed728c556"}, + {file = "dbt_extractor-0.5.1-cp38-abi3-macosx_10_12_x86_64.whl", hash = "sha256:3614ce9f83ae4cd0dc95f77730034a793a1c090a52dcf698ba1c94050afe3a8b"}, + {file = "dbt_extractor-0.5.1-cp38-abi3-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ea4edf33035d0a060b1e01c42fb2d99316457d44c954d6ed4eed9f1948664d87"}, + {file = "dbt_extractor-0.5.1-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3b9bf50eb062b4344d9546fe42038996c6e7e7daa10724aa955d64717260e5d"}, + {file = "dbt_extractor-0.5.1-cp38-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c0ce901d4ebf0664977e4e1cbf596d4afc6c1339fcc7d2cf67ce3481566a626f"}, + {file = "dbt_extractor-0.5.1-cp38-abi3-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:cbe338b76e9ffaa18275456e041af56c21bb517f6fbda7a58308138703da0996"}, + {file = "dbt_extractor-0.5.1-cp38-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1b25fa7a276ab26aa2d70ff6e0cf4cfb1490d7831fb57ee1337c24d2b0333b84"}, + {file = "dbt_extractor-0.5.1-cp38-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c5651e458be910ff567c0da3ea2eb084fd01884cc88888ac2cf1e240dcddacc2"}, + {file = "dbt_extractor-0.5.1-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:62e4f040fd338b652683421ce48e903812e27fd6e7af58b1b70a4e1f9f2c79e3"}, + {file = "dbt_extractor-0.5.1-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:91e25ad78f1f4feadd27587ebbcc46ad909cfad843118908f30336d08d8400ca"}, + {file = "dbt_extractor-0.5.1-cp38-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:cdf9938b36cd098bcdd80f43dc03864da3f69f57d903a9160a32236540d4ddcd"}, + {file = "dbt_extractor-0.5.1-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:475e2c05b17eb4976eff6c8f7635be42bec33f15a74ceb87a40242c94a99cebf"}, + {file = "dbt_extractor-0.5.1-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:100453ba06e169cbdb118234ab3f06f6722a2e0e316089b81c88dea701212abc"}, + {file = "dbt_extractor-0.5.1-cp38-abi3-win32.whl", hash = "sha256:6916aae085fd5f2af069fd6947933e78b742c9e3d2165e1740c2e28ae543309a"}, + {file = "dbt_extractor-0.5.1-cp38-abi3-win_amd64.whl", hash = "sha256:eecc08f3743e802a8ede60c89f7b2bce872acc86120cbc0ae7df229bb8a95083"}, + {file = "dbt_extractor-0.5.1.tar.gz", hash = "sha256:cd5d95576a8dea4190240aaf9936a37fd74b4b7913ca69a3c368fc4472bb7e13"}, +] [[package]] name = "dbt-postgres" version = "1.7.4" description = "The postgres adapter plugin for dbt (data build tool)" -category = "main" optional = true python-versions = ">=3.8" +files = [ + {file = "dbt-postgres-1.7.4.tar.gz", hash = "sha256:16185b8de36d1a2052a2e4b85512306ab55085b1ea323a353d0dc3628473208d"}, + {file = "dbt_postgres-1.7.4-py3-none-any.whl", hash = "sha256:d414b070ca5e48925ea9ab12706bbb9e2294f7d4509c28e7af42268596334044"}, +] [package.dependencies] agate = "*" @@ -1700,9 +2212,12 @@ psycopg2-binary = ">=2.8,<3.0" name = "dbt-redshift" version = "1.7.1" description = "The Redshift adapter plugin for dbt" -category = "main" optional = true python-versions = ">=3.8" +files = [ + {file = "dbt-redshift-1.7.1.tar.gz", hash = "sha256:6da69a83038d011570d131b85171842d0858a46bca3757419ae193b5724a2119"}, + {file = "dbt_redshift-1.7.1-py3-none-any.whl", hash = 
"sha256:2a48b9424934f5445e4285740ebe512afaa75882138121536ccc21d027ef62f2"}, +] [package.dependencies] agate = "*" @@ -1714,9 +2229,12 @@ redshift-connector = "2.0.915" name = "dbt-semantic-interfaces" version = "0.4.3" description = "The shared semantic layer definitions that dbt-core and MetricFlow use" -category = "main" optional = false python-versions = ">=3.8" +files = [ + {file = "dbt_semantic_interfaces-0.4.3-py3-none-any.whl", hash = "sha256:af6ab8509da81ae5f5f1d5631c9761cccaed8cd5311d4824a8d4168ecd0f2093"}, + {file = "dbt_semantic_interfaces-0.4.3.tar.gz", hash = "sha256:9a46d07ad022a4c48783565a776ebc6f1d19e0412e70c4759bc9d7bba461ea1c"}, +] [package.dependencies] click = ">=7.0,<9.0" @@ -1733,9 +2251,12 @@ typing-extensions = ">=4.4,<5.0" name = "dbt-snowflake" version = "1.7.1" description = "The Snowflake adapter plugin for dbt" -category = "main" optional = true python-versions = ">=3.8" +files = [ + {file = "dbt-snowflake-1.7.1.tar.gz", hash = "sha256:842a9e87b9e2d999e3bc27aaa369398a4d02bb3f8bb7447aa6151204d4eb90f0"}, + {file = "dbt_snowflake-1.7.1-py3-none-any.whl", hash = "sha256:32ef8733f67dcf4eb594d1b80852ef0b67e920f25bb8a2953031a3868a8d2b3e"}, +] [package.dependencies] agate = "*" @@ -1746,9 +2267,12 @@ snowflake-connector-python = {version = ">=3.0,<4.0", extras = ["secure-local-st name = "dbt-spark" version = "1.7.1" description = "The Apache Spark adapter plugin for dbt" -category = "main" optional = true python-versions = ">=3.8" +files = [ + {file = "dbt-spark-1.7.1.tar.gz", hash = "sha256:a10e5d1bfdb2ca98e7ae2badd06150e2695d9d4fa18ae2354ed5bd093d77f947"}, + {file = "dbt_spark-1.7.1-py3-none-any.whl", hash = "sha256:99b5002edcdb82058a3b0ad33eb18b91a4bdde887d94855e8bd6f633d78837dc"}, +] [package.dependencies] dbt-core = ">=1.7.0,<1.8.0" @@ -1764,9 +2288,12 @@ session = ["pyspark (>=3.0.0,<4.0.0)"] name = "decopatch" version = "1.4.10" description = "Create decorators easily in python." -category = "dev" optional = false python-versions = "*" +files = [ + {file = "decopatch-1.4.10-py2.py3-none-any.whl", hash = "sha256:e151f7f93de2b1b3fd3f3272dcc7cefd1a69f68ec1c2d8e288ecd9deb36dc5f7"}, + {file = "decopatch-1.4.10.tar.gz", hash = "sha256:957f49c93f4150182c23f8fb51d13bb3213e0f17a79e09c8cca7057598b55720"}, +] [package.dependencies] makefun = ">=1.5.0" @@ -1775,17 +2302,23 @@ makefun = ">=1.5.0" name = "decorator" version = "5.1.1" description = "Decorators for Humans" -category = "main" optional = false python-versions = ">=3.5" +files = [ + {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, + {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, +] [[package]] name = "deprecated" version = "1.2.14" description = "Python @deprecated decorator to deprecate old python classes, functions or methods." 
-category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, + {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, +] [package.dependencies] wrapt = ">=1.10,<2" @@ -1797,9 +2330,12 @@ dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] name = "diff-cover" version = "7.7.0" description = "Run coverage and linting reports on diffs" -category = "dev" optional = false python-versions = ">=3.7.2,<4.0.0" +files = [ + {file = "diff_cover-7.7.0-py3-none-any.whl", hash = "sha256:bf86f32ec999f9a9e79bf24969f7127ea7b4e55c3ef3cd9300feb13188c89736"}, + {file = "diff_cover-7.7.0.tar.gz", hash = "sha256:60614cf7e722cf7fb1bde497afac0b514294e1e26534449622dac4da296123fb"}, +] [package.dependencies] chardet = ">=3.0.0" @@ -1814,9 +2350,12 @@ toml = ["tomli (>=1.2.1)"] name = "dill" version = "0.3.7" description = "serialize all of Python" -category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "dill-0.3.7-py3-none-any.whl", hash = "sha256:76b122c08ef4ce2eedcd4d1abd8e641114bfc6c2867f49f3c41facf65bf19f5e"}, + {file = "dill-0.3.7.tar.gz", hash = "sha256:cc1c8b182eb3013e24bd475ff2e9295af86c1a38eb1aff128dac8962a9ce3c03"}, +] [package.extras] graph = ["objgraph (>=1.7.2)"] @@ -1825,9 +2364,12 @@ graph = ["objgraph (>=1.7.2)"] name = "dnspython" version = "2.4.2" description = "DNS toolkit" -category = "dev" optional = false python-versions = ">=3.8,<4.0" +files = [ + {file = "dnspython-2.4.2-py3-none-any.whl", hash = "sha256:57c6fbaaeaaf39c891292012060beb141791735dbb4004798328fc2c467402d8"}, + {file = "dnspython-2.4.2.tar.gz", hash = "sha256:8dcfae8c7460a2f84b4072e26f1c9f4101ca20c071649cb7c34e8b6a93d58984"}, +] [package.extras] dnssec = ["cryptography (>=2.6,<42.0)"] @@ -1841,9 +2383,12 @@ wmi = ["wmi (>=1.5.1,<2.0.0)"] name = "docspec" version = "2.2.1" description = "Docspec is a JSON object specification for representing API documentation of programming languages." -category = "dev" optional = false python-versions = ">=3.7,<4.0" +files = [ + {file = "docspec-2.2.1-py3-none-any.whl", hash = "sha256:7538f750095a9688c6980ff9a4e029a823a500f64bd00b6b4bdb27951feb31cb"}, + {file = "docspec-2.2.1.tar.gz", hash = "sha256:4854e77edc0e2de40e785e57e95880f7095a05fe978f8b54cef7a269586e15ff"}, +] [package.dependencies] "databind.core" = ">=4.2.6,<5.0.0" @@ -1854,9 +2399,12 @@ Deprecated = ">=1.2.12,<2.0.0" name = "docspec-python" version = "2.2.1" description = "A parser based on lib2to3 producing docspec data from Python source code." 
-category = "dev" optional = false python-versions = ">=3.7,<4.0" +files = [ + {file = "docspec_python-2.2.1-py3-none-any.whl", hash = "sha256:76ac41d35a8face35b2d766c2e8a416fb8832359785d396f0d53bcb00f178e54"}, + {file = "docspec_python-2.2.1.tar.gz", hash = "sha256:c41b850b4d6f4de30999ea6f82c9cdb9183d9bcba45559ee9173d3dab7281559"}, +] [package.dependencies] black = ">=23.1.0,<24.0.0" @@ -1867,9 +2415,11 @@ docspec = ">=2.2.1,<3.0.0" name = "docstring-parser" version = "0.11" description = "\"Parse Python docstrings in reST, Google and Numpydoc format\"" -category = "dev" optional = false python-versions = ">=3.6" +files = [ + {file = "docstring_parser-0.11.tar.gz", hash = "sha256:93b3f8f481c7d24e37c5d9f30293c89e2933fa209421c8abd731dd3ef0715ecb"}, +] [package.extras] test = ["black", "pytest"] @@ -1878,17 +2428,23 @@ test = ["black", "pytest"] name = "docutils" version = "0.20.1" description = "Docutils -- Python Documentation Utilities" -category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "docutils-0.20.1-py3-none-any.whl", hash = "sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6"}, + {file = "docutils-0.20.1.tar.gz", hash = "sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b"}, +] [[package]] name = "domdf-python-tools" version = "3.6.1" description = "Helpful functions for Python 🐍 🛠️" -category = "dev" optional = false python-versions = ">=3.6" +files = [ + {file = "domdf_python_tools-3.6.1-py3-none-any.whl", hash = "sha256:e18158460850957f18e740eb94ede56f580ddb0cb162ab9d9834ed8bbb1b6431"}, + {file = "domdf_python_tools-3.6.1.tar.gz", hash = "sha256:acc04563d23bce4d437dd08af6b9bea788328c412772a044d8ca428a7ad861be"}, +] [package.dependencies] importlib-metadata = {version = ">=3.6.0", markers = "python_version < \"3.9\""} @@ -1903,25 +2459,116 @@ dates = ["pytz (>=2019.1)"] name = "duckdb" version = "0.9.2" description = "DuckDB embedded database" -category = "main" optional = false python-versions = ">=3.7.0" +files = [ + {file = "duckdb-0.9.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:aadcea5160c586704c03a8a796c06a8afffbefefb1986601104a60cb0bfdb5ab"}, + {file = "duckdb-0.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:08215f17147ed83cbec972175d9882387366de2ed36c21cbe4add04b39a5bcb4"}, + {file = "duckdb-0.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ee6c2a8aba6850abef5e1be9dbc04b8e72a5b2c2b67f77892317a21fae868fe7"}, + {file = "duckdb-0.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ff49f3da9399900fd58b5acd0bb8bfad22c5147584ad2427a78d937e11ec9d0"}, + {file = "duckdb-0.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd5ac5baf8597efd2bfa75f984654afcabcd698342d59b0e265a0bc6f267b3f0"}, + {file = "duckdb-0.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:81c6df905589a1023a27e9712edb5b724566587ef280a0c66a7ec07c8083623b"}, + {file = "duckdb-0.9.2-cp310-cp310-win32.whl", hash = "sha256:a298cd1d821c81d0dec8a60878c4b38c1adea04a9675fb6306c8f9083bbf314d"}, + {file = "duckdb-0.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:492a69cd60b6cb4f671b51893884cdc5efc4c3b2eb76057a007d2a2295427173"}, + {file = "duckdb-0.9.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:061a9ea809811d6e3025c5de31bc40e0302cfb08c08feefa574a6491e882e7e8"}, + {file = "duckdb-0.9.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a43f93be768af39f604b7b9b48891f9177c9282a408051209101ff80f7450d8f"}, + {file = 
"duckdb-0.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ac29c8c8f56fff5a681f7bf61711ccb9325c5329e64f23cb7ff31781d7b50773"}, + {file = "duckdb-0.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b14d98d26bab139114f62ade81350a5342f60a168d94b27ed2c706838f949eda"}, + {file = "duckdb-0.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:796a995299878913e765b28cc2b14c8e44fae2f54ab41a9ee668c18449f5f833"}, + {file = "duckdb-0.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6cb64ccfb72c11ec9c41b3cb6181b6fd33deccceda530e94e1c362af5f810ba1"}, + {file = "duckdb-0.9.2-cp311-cp311-win32.whl", hash = "sha256:930740cb7b2cd9e79946e1d3a8f66e15dc5849d4eaeff75c8788d0983b9256a5"}, + {file = "duckdb-0.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:c28f13c45006fd525001b2011cdf91fa216530e9751779651e66edc0e446be50"}, + {file = "duckdb-0.9.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fbce7bbcb4ba7d99fcec84cec08db40bc0dd9342c6c11930ce708817741faeeb"}, + {file = "duckdb-0.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15a82109a9e69b1891f0999749f9e3265f550032470f51432f944a37cfdc908b"}, + {file = "duckdb-0.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9490fb9a35eb74af40db5569d90df8a04a6f09ed9a8c9caa024998c40e2506aa"}, + {file = "duckdb-0.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:696d5c6dee86c1a491ea15b74aafe34ad2b62dcd46ad7e03b1d00111ca1a8c68"}, + {file = "duckdb-0.9.2-cp37-cp37m-win32.whl", hash = "sha256:4f0935300bdf8b7631ddfc838f36a858c1323696d8c8a2cecbd416bddf6b0631"}, + {file = "duckdb-0.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:0aab900f7510e4d2613263865570203ddfa2631858c7eb8cbed091af6ceb597f"}, + {file = "duckdb-0.9.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:7d8130ed6a0c9421b135d0743705ea95b9a745852977717504e45722c112bf7a"}, + {file = "duckdb-0.9.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:974e5de0294f88a1a837378f1f83330395801e9246f4e88ed3bfc8ada65dcbee"}, + {file = "duckdb-0.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4fbc297b602ef17e579bb3190c94d19c5002422b55814421a0fc11299c0c1100"}, + {file = "duckdb-0.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1dd58a0d84a424924a35b3772419f8cd78a01c626be3147e4934d7a035a8ad68"}, + {file = "duckdb-0.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11a1194a582c80dfb57565daa06141727e415ff5d17e022dc5f31888a5423d33"}, + {file = "duckdb-0.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:be45d08541002a9338e568dca67ab4f20c0277f8f58a73dfc1435c5b4297c996"}, + {file = "duckdb-0.9.2-cp38-cp38-win32.whl", hash = "sha256:dd6f88aeb7fc0bfecaca633629ff5c986ac966fe3b7dcec0b2c48632fd550ba2"}, + {file = "duckdb-0.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:28100c4a6a04e69aa0f4a6670a6d3d67a65f0337246a0c1a429f3f28f3c40b9a"}, + {file = "duckdb-0.9.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7ae5bf0b6ad4278e46e933e51473b86b4b932dbc54ff097610e5b482dd125552"}, + {file = "duckdb-0.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e5d0bb845a80aa48ed1fd1d2d285dd352e96dc97f8efced2a7429437ccd1fe1f"}, + {file = "duckdb-0.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ce262d74a52500d10888110dfd6715989926ec936918c232dcbaddb78fc55b4"}, + {file = "duckdb-0.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6935240da090a7f7d2666f6d0a5e45ff85715244171ca4e6576060a7f4a1200e"}, + {file = 
"duckdb-0.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5cfb93e73911696a98b9479299d19cfbc21dd05bb7ab11a923a903f86b4d06e"}, + {file = "duckdb-0.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:64e3bc01751f31e7572d2716c3e8da8fe785f1cdc5be329100818d223002213f"}, + {file = "duckdb-0.9.2-cp39-cp39-win32.whl", hash = "sha256:6e5b80f46487636368e31b61461940e3999986359a78660a50dfdd17dd72017c"}, + {file = "duckdb-0.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:e6142a220180dbeea4f341708bd5f9501c5c962ce7ef47c1cadf5e8810b4cb13"}, + {file = "duckdb-0.9.2.tar.gz", hash = "sha256:3843afeab7c3fc4a4c0b53686a4cc1d9cdbdadcbb468d60fef910355ecafd447"}, +] [[package]] name = "duckdb" version = "0.10.0" description = "DuckDB in-process database" -category = "main" optional = false python-versions = ">=3.7.0" +files = [ + {file = "duckdb-0.10.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:bd0ffb3fddef0f72a150e4d76e10942a84a1a0447d10907df1621b90d6668060"}, + {file = "duckdb-0.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f3d709d5c7c1a12b5e10d0b05fa916c670cd2b50178e3696faa0cc16048a1745"}, + {file = "duckdb-0.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9114aa22ec5d591a20ce5184be90f49d8e5b5348ceaab21e102c54560d07a5f8"}, + {file = "duckdb-0.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77a37877efadf39caf7cadde0f430fedf762751b9c54750c821e2f1316705a21"}, + {file = "duckdb-0.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87cbc9e1d9c3fc9f14307bea757f99f15f46843c0ab13a6061354410824ed41f"}, + {file = "duckdb-0.10.0-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f0bfec79fed387201550517d325dff4fad2705020bc139d936cab08b9e845662"}, + {file = "duckdb-0.10.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c5622134d2d9796b15e09de810e450859d4beb46d9b861357ec9ae40a61b775c"}, + {file = "duckdb-0.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:089ee8e831ccaef1b73fc89c43b661567175eed0115454880bafed5e35cda702"}, + {file = "duckdb-0.10.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a05af63747f1d7021995f0811c333dee7316cec3b06c0d3e4741b9bdb678dd21"}, + {file = "duckdb-0.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:072d6eba5d8a59e0069a8b5b4252fed8a21f9fe3f85a9129d186a39b3d0aea03"}, + {file = "duckdb-0.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a77b85668f59b919042832e4659538337f1c7f197123076c5311f1c9cf077df7"}, + {file = "duckdb-0.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96a666f1d2da65d03199a977aec246920920a5ea1da76b70ae02bd4fb1ffc48c"}, + {file = "duckdb-0.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ec76a4262b783628d26612d184834852d9c92fb203e91af789100c17e3d7173"}, + {file = "duckdb-0.10.0-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:009dd9d2cdbd3b061a9efbdfc79f2d1a8377bcf49f1e5f430138621f8c083a6c"}, + {file = "duckdb-0.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:878f06766088090dad4a2e5ee0081555242b2e8dcb29415ecc97e388cf0cf8d8"}, + {file = "duckdb-0.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:713ff0a1fb63a6d60f454acf67f31656549fb5d63f21ac68314e4f522daa1a89"}, + {file = "duckdb-0.10.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:9c0ee450dfedfb52dd4957244e31820feef17228da31af6d052979450a80fd19"}, + {file = "duckdb-0.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:ff79b2ea9994398b545c0d10601cd73565fbd09f8951b3d8003c7c5c0cebc7cb"}, + {file = "duckdb-0.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6bdf1aa71b924ef651062e6b8ff9981ad85bec89598294af8a072062c5717340"}, + {file = "duckdb-0.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0265bbc8216be3ced7b377ba8847128a3fc0ef99798a3c4557c1b88e3a01c23"}, + {file = "duckdb-0.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d418a315a07707a693bd985274c0f8c4dd77015d9ef5d8d3da4cc1942fd82e0"}, + {file = "duckdb-0.10.0-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2828475a292e68c71855190b818aded6bce7328f79e38c04a0c75f8f1c0ceef0"}, + {file = "duckdb-0.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c3aaeaae2eba97035c65f31ffdb18202c951337bf2b3d53d77ce1da8ae2ecf51"}, + {file = "duckdb-0.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:c51790aaaea97d8e4a58a114c371ed8d2c4e1ca7cbf29e3bdab6d8ccfc5afc1e"}, + {file = "duckdb-0.10.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8af1ae7cc77a12206b6c47ade191882cc8f49f750bb3e72bb86ac1d4fa89926a"}, + {file = "duckdb-0.10.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa4f7e8e8dc0e376aeb280b83f2584d0e25ec38985c27d19f3107b2edc4f4a97"}, + {file = "duckdb-0.10.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28ae942a79fad913defa912b56483cd7827a4e7721f4ce4bc9025b746ecb3c89"}, + {file = "duckdb-0.10.0-cp37-cp37m-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:01b57802898091455ca2a32c1335aac1e398da77c99e8a96a1e5de09f6a0add9"}, + {file = "duckdb-0.10.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:52e1ad4a55fa153d320c367046b9500578192e01c6d04308ba8b540441736f2c"}, + {file = "duckdb-0.10.0-cp37-cp37m-win_amd64.whl", hash = "sha256:904c47d04095af745e989c853f0bfc0776913dfc40dfbd2da7afdbbb5f67fed0"}, + {file = "duckdb-0.10.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:184ae7ea5874f3b8fa51ab0f1519bdd088a0b78c32080ee272b1d137e2c8fd9c"}, + {file = "duckdb-0.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bd33982ecc9bac727a032d6cedced9f19033cbad56647147408891eb51a6cb37"}, + {file = "duckdb-0.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f59bf0949899105dd5f8864cb48139bfb78454a8c017b8258ba2b5e90acf7afc"}, + {file = "duckdb-0.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:395f3b18948001e35dceb48a4423d574e38656606d033eef375408b539e7b076"}, + {file = "duckdb-0.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b8eb2b803be7ee1df70435c33b03a4598cdaf676cd67ad782b288dcff65d781"}, + {file = "duckdb-0.10.0-cp38-cp38-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:31b2ddd331801064326c8e3587a4db8a31d02aef11332c168f45b3bd92effb41"}, + {file = "duckdb-0.10.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c8b89e76a041424b8c2026c5dc1f74b53fbbc6c6f650d563259885ab2e7d093d"}, + {file = "duckdb-0.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:79084a82f16c0a54f6bfb7ded5600400c2daa90eb0d83337d81a56924eaee5d4"}, + {file = "duckdb-0.10.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:79799b3a270dcd9070f677ba510f1e66b112df3068425691bac97c5e278929c7"}, + {file = "duckdb-0.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e8fc394bfe3434920cdbcfbdd0ac3ba40902faa1dbda088db0ba44003a45318a"}, + {file = "duckdb-0.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:c116605551b4abf5786243a59bcef02bd69cc51837d0c57cafaa68cdc428aa0c"}, + {file = "duckdb-0.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3191170c3b0a43b0c12644800326f5afdea00d5a4621d59dbbd0c1059139e140"}, + {file = "duckdb-0.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fee69a50eb93c72dc77e7ab1fabe0c38d21a52c5da44a86aa217081e38f9f1bd"}, + {file = "duckdb-0.10.0-cp39-cp39-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c5f449e87dacb16b0d145dbe65fa6fdb5a55b2b6911a46d74876e445dd395bac"}, + {file = "duckdb-0.10.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4487d0df221b17ea4177ad08131bc606b35f25cfadf890987833055b9d10cdf6"}, + {file = "duckdb-0.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:c099ae2ff8fe939fda62da81704f91e2f92ac45e48dc0e37c679c9d243d01e65"}, + {file = "duckdb-0.10.0.tar.gz", hash = "sha256:c02bcc128002aa79e3c9d89b9de25e062d1096a8793bc0d7932317b7977f6845"}, +] [[package]] name = "email-validator" version = "1.3.1" description = "A robust email address syntax and deliverability validation library." -category = "dev" optional = false python-versions = ">=3.5" +files = [ + {file = "email_validator-1.3.1-py2.py3-none-any.whl", hash = "sha256:49a72f5fa6ed26be1c964f0567d931d10bf3fdeeacdf97bc26ef1cd2a44e0bda"}, + {file = "email_validator-1.3.1.tar.gz", hash = "sha256:d178c5c6fa6c6824e9b04f199cf23e79ac15756786573c190d2ad13089411ad2"}, +] [package.dependencies] dnspython = ">=1.15.0" @@ -1931,9 +2578,12 @@ idna = ">=2.0.0" name = "enlighten" version = "1.11.2" description = "Enlighten Progress Bar" -category = "dev" optional = false python-versions = "*" +files = [ + {file = "enlighten-1.11.2-py2.py3-none-any.whl", hash = "sha256:98c9eb20e022b6a57f1c8d4f17e16760780b6881e6d658c40f52d21255ea45f3"}, + {file = "enlighten-1.11.2.tar.gz", hash = "sha256:9284861dee5a272e0e1a3758cd3f3b7180b1bd1754875da76876f2a7f46ccb61"}, +] [package.dependencies] blessed = ">=1.17.7" @@ -1943,17 +2593,23 @@ prefixed = ">=0.3.2" name = "et-xmlfile" version = "1.1.0" description = "An implementation of lxml.xmlfile for the standard library" -category = "main" optional = true python-versions = ">=3.6" +files = [ + {file = "et_xmlfile-1.1.0-py3-none-any.whl", hash = "sha256:a2ba85d1d6a74ef63837eed693bcb89c3f752169b0e3e7ae5b16ca5e1b3deada"}, + {file = "et_xmlfile-1.1.0.tar.gz", hash = "sha256:8eb9e2bc2f8c97e37a2dc85a09ecdcdec9d8a396530a6d5a33b30b9a92da0c5c"}, +] [[package]] name = "exceptiongroup" version = "1.1.3" description = "Backport of PEP 654 (exception groups)" -category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.1.3-py3-none-any.whl", hash = "sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3"}, + {file = "exceptiongroup-1.1.3.tar.gz", hash = "sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9"}, +] [package.extras] test = ["pytest (>=6)"] @@ -1962,10 +2618,13 @@ test = ["pytest (>=6)"] name = "fastembed" version = "0.1.1" description = "Fast, light, accurate library built for retrieval embedding generation" -category = "main" optional = true python-versions = ">=3.8.0,<3.12" - +files = [ + {file = "fastembed-0.1.1-py3-none-any.whl", hash = "sha256:131413ae52cd72f4c8cced7a675f8269dbfd1a852abade3c815e265114bcc05a"}, + {file = "fastembed-0.1.1.tar.gz", hash = "sha256:f7e524ee4f74bb8aad16be5b687d1f77f608d40e96e292c87881dc36baf8f4c7"}, +] + [package.dependencies] onnx = ">=1.11,<2.0" onnxruntime = ">=1.15,<2.0" @@ -1977,9 
+2636,12 @@ tqdm = ">=4.65,<5.0" name = "filelock" version = "3.12.3" description = "A platform independent file lock." -category = "main" optional = true python-versions = ">=3.8" +files = [ + {file = "filelock-3.12.3-py3-none-any.whl", hash = "sha256:f067e40ccc40f2b48395a80fcbd4728262fab54e232e090a4063ab804179efeb"}, + {file = "filelock-3.12.3.tar.gz", hash = "sha256:0ecc1dd2ec4672a10c8550a8182f1bd0c0a5088470ecd5a125e45f49472fac3d"}, +] [package.dependencies] typing-extensions = {version = ">=4.7.1", markers = "python_version < \"3.11\""} @@ -1992,9 +2654,12 @@ testing = ["covdefaults (>=2.3)", "coverage (>=7.3)", "diff-cover (>=7.7)", "pyt name = "flake8" version = "5.0.4" description = "the modular source code checker: pep8 pyflakes and co" -category = "dev" optional = false python-versions = ">=3.6.1" +files = [ + {file = "flake8-5.0.4-py2.py3-none-any.whl", hash = "sha256:7a1cf6b73744f5806ab95e526f6f0d8c01c66d7bbe349562d22dfca20610b248"}, + {file = "flake8-5.0.4.tar.gz", hash = "sha256:6fbe320aad8d6b95cec8b8e47bc933004678dc63095be98528b7bdd2a9f510db"}, +] [package.dependencies] mccabe = ">=0.7.0,<0.8.0" @@ -2005,9 +2670,12 @@ pyflakes = ">=2.5.0,<2.6.0" name = "flake8-bugbear" version = "22.12.6" description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle." -category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "flake8-bugbear-22.12.6.tar.gz", hash = "sha256:4cdb2c06e229971104443ae293e75e64c6107798229202fbe4f4091427a30ac0"}, + {file = "flake8_bugbear-22.12.6-py3-none-any.whl", hash = "sha256:b69a510634f8a9c298dfda2b18a8036455e6b19ecac4fe582e4d7a0abfa50a30"}, +] [package.dependencies] attrs = ">=19.2.0" @@ -2020,9 +2688,12 @@ dev = ["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit", "tox"] name = "flake8-builtins" version = "1.5.3" description = "Check for python builtins being used as variables or parameters." -category = "dev" optional = false python-versions = "*" +files = [ + {file = "flake8-builtins-1.5.3.tar.gz", hash = "sha256:09998853b2405e98e61d2ff3027c47033adbdc17f9fe44ca58443d876eb00f3b"}, + {file = "flake8_builtins-1.5.3-py2.py3-none-any.whl", hash = "sha256:7706babee43879320376861897e5d1468e396a40b8918ed7bccf70e5f90b8687"}, +] [package.dependencies] flake8 = "*" @@ -2034,9 +2705,12 @@ test = ["coverage", "coveralls", "mock", "pytest", "pytest-cov"] name = "flake8-encodings" version = "0.5.0.post1" description = "A Flake8 plugin to identify incorrect use of encodings." -category = "dev" optional = false python-versions = ">=3.6" +files = [ + {file = "flake8_encodings-0.5.0.post1-py3-none-any.whl", hash = "sha256:d2fecca0e89ba09c86e5d61cf6bdb1b337f0d74746aac67bbcf0c517b4cb6cba"}, + {file = "flake8_encodings-0.5.0.post1.tar.gz", hash = "sha256:082c0163325c85b438a8106e876283b5ed3cbfc53e68d89130d70be8be4c9977"}, +] [package.dependencies] astatine = ">=0.3.1" @@ -2052,9 +2726,12 @@ classes = ["jedi (>=0.18.0)"] name = "flake8-helper" version = "0.2.1" description = "A helper library for Flake8 plugins." 
-category = "dev" optional = false python-versions = ">=3.6" +files = [ + {file = "flake8_helper-0.2.1-py3-none-any.whl", hash = "sha256:9123cdf351ad32ee8a51b85036052302c478122d62fb512c0773e111b3d05241"}, + {file = "flake8_helper-0.2.1.tar.gz", hash = "sha256:479f86d1c52df8e49ff876ecd3873242699f93eeece7e6675cdca9c37c9b0a16"}, +] [package.dependencies] flake8 = ">=3.8.4" @@ -2063,9 +2740,12 @@ flake8 = ">=3.8.4" name = "flake8-tidy-imports" version = "4.10.0" description = "A flake8 plugin that helps you write tidier imports." -category = "dev" optional = false python-versions = ">=3.8" +files = [ + {file = "flake8_tidy_imports-4.10.0-py3-none-any.whl", hash = "sha256:b0387fb2ea200441bd142309e716fb7b8f4b0937bdf5f8b7c0c118a5f5e2b8ed"}, + {file = "flake8_tidy_imports-4.10.0.tar.gz", hash = "sha256:bd6cf86465402d2b86903009b748d85a628e599e17b76e810c9857e3a2815173"}, +] [package.dependencies] flake8 = ">=3.8.0" @@ -2074,9 +2754,12 @@ flake8 = ">=3.8.0" name = "flask" version = "2.2.5" description = "A simple framework for building complex web applications." -category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "Flask-2.2.5-py3-none-any.whl", hash = "sha256:58107ed83443e86067e41eff4631b058178191a355886f8e479e347fa1285fdf"}, + {file = "Flask-2.2.5.tar.gz", hash = "sha256:edee9b0a7ff26621bd5a8c10ff484ae28737a2410d99b0bb9a6850c7fb977aa0"}, +] [package.dependencies] click = ">=8.0" @@ -2093,9 +2776,12 @@ dotenv = ["python-dotenv"] name = "flask-appbuilder" version = "4.3.10" description = "Simple and rapid application development framework, built on top of Flask. includes detailed security, auto CRUD generation for your models, google charts and much more." -category = "dev" optional = false python-versions = "~=3.7" +files = [ + {file = "Flask-AppBuilder-4.3.10.tar.gz", hash = "sha256:4173c878e56b81c6acac5e3c80c133f4183f43442fd944552bd9f4023f5baceb"}, + {file = "Flask_AppBuilder-4.3.10-py3-none-any.whl", hash = "sha256:c0af506e1a68e7ee14f26a16fda829f1a14f8343654c30bdbb1351d23c545df9"}, +] [package.dependencies] apispec = {version = ">=6.0.0,<7", extras = ["yaml"]} @@ -2130,9 +2816,12 @@ talisman = ["flask-talisman (>=1.0.0,<2.0)"] name = "flask-babel" version = "2.0.0" description = "Adds i18n/l10n support to Flask applications" -category = "dev" optional = false python-versions = "*" +files = [ + {file = "Flask-Babel-2.0.0.tar.gz", hash = "sha256:f9faf45cdb2e1a32ea2ec14403587d4295108f35017a7821a2b1acb8cfd9257d"}, + {file = "Flask_Babel-2.0.0-py3-none-any.whl", hash = "sha256:e6820a052a8d344e178cdd36dd4bb8aea09b4bda3d5f9fa9f008df2c7f2f5468"}, +] [package.dependencies] Babel = ">=2.3" @@ -2147,9 +2836,12 @@ dev = ["Pallets-Sphinx-Themes", "bumpversion", "ghp-import", "pytest", "pytest-m name = "flask-caching" version = "2.0.2" description = "Adds caching support to Flask applications." 
-category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "Flask-Caching-2.0.2.tar.gz", hash = "sha256:24b60c552d59a9605cc1b6a42c56cdb39a82a28dab4532bbedb9222ae54ecb4e"}, + {file = "Flask_Caching-2.0.2-py3-none-any.whl", hash = "sha256:19571f2570e9b8dd9dd9d2f49d7cbee69c14ebe8cc001100b1eb98c379dd80ad"}, +] [package.dependencies] cachelib = ">=0.9.0,<0.10.0" @@ -2159,9 +2851,12 @@ Flask = "<3" name = "flask-jwt-extended" version = "4.5.2" description = "Extended JWT integration with Flask" -category = "dev" optional = false python-versions = ">=3.7,<4" +files = [ + {file = "Flask-JWT-Extended-4.5.2.tar.gz", hash = "sha256:ba56245ba43b71c8ae936784b867625dce8b9956faeedec2953222e57942fb0b"}, + {file = "Flask_JWT_Extended-4.5.2-py2.py3-none-any.whl", hash = "sha256:e0ef23d8c863746bd141046167073699e1a7b03c97169cbba70f05b8d9cd6b9e"}, +] [package.dependencies] Flask = ">=2.0,<3.0" @@ -2175,9 +2870,12 @@ asymmetric-crypto = ["cryptography (>=3.3.1)"] name = "flask-limiter" version = "3.5.0" description = "Rate limiting for flask applications" -category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "Flask-Limiter-3.5.0.tar.gz", hash = "sha256:13a3491b994c49f7cb4706587a38ca47e8162b576530472df38be68104f299c0"}, + {file = "Flask_Limiter-3.5.0-py3-none-any.whl", hash = "sha256:dbda4174f44e6cb858c6eb75e7488186f2977dd5d33d7028ba1aabf179de1bee"}, +] [package.dependencies] Flask = ">=2" @@ -2195,9 +2893,12 @@ redis = ["limits[redis]"] name = "flask-login" version = "0.6.2" description = "User authentication and session management for Flask." -category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "Flask-Login-0.6.2.tar.gz", hash = "sha256:c0a7baa9fdc448cdd3dd6f0939df72eec5177b2f7abe6cb82fc934d29caac9c3"}, + {file = "Flask_Login-0.6.2-py3-none-any.whl", hash = "sha256:1ef79843f5eddd0f143c2cd994c1b05ac83c0401dc6234c143495af9a939613f"}, +] [package.dependencies] Flask = ">=1.0.4" @@ -2207,9 +2908,12 @@ Werkzeug = ">=1.0.1" name = "flask-session" version = "0.5.0" description = "Server-side session support for Flask" -category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "Flask-Session-0.5.0.tar.gz", hash = "sha256:190875e6aebf2953c6803d42379ef3b934bc209ef8ef006f97aecb08f5aaeb86"}, + {file = "flask_session-0.5.0-py3-none-any.whl", hash = "sha256:1619bcbc16f04f64e90f8e0b17145ba5c9700090bb1294e889956c1282d58631"}, +] [package.dependencies] cachelib = "*" @@ -2219,9 +2923,12 @@ flask = ">=2.2" name = "flask-sqlalchemy" version = "2.5.1" description = "Adds SQLAlchemy support to your Flask application." -category = "dev" optional = false python-versions = ">= 2.7, != 3.0.*, != 3.1.*, != 3.2.*, != 3.3.*" +files = [ + {file = "Flask-SQLAlchemy-2.5.1.tar.gz", hash = "sha256:2bda44b43e7cacb15d4e05ff3cc1f8bc97936cc464623424102bfc2c35e95912"}, + {file = "Flask_SQLAlchemy-2.5.1-py2.py3-none-any.whl", hash = "sha256:f12c3d4cc5cc7fdcc148b9527ea05671718c3ea45d50c7e732cceb33f574b390"}, +] [package.dependencies] Flask = ">=0.10" @@ -2231,9 +2938,12 @@ SQLAlchemy = ">=0.8.0" name = "flask-wtf" version = "1.1.1" description = "Form rendering, validation, and CSRF protection for Flask with WTForms." 
-category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "Flask-WTF-1.1.1.tar.gz", hash = "sha256:41c4244e9ae626d63bed42ae4785b90667b885b1535d5a4095e1f63060d12aa9"}, + {file = "Flask_WTF-1.1.1-py3-none-any.whl", hash = "sha256:7887d6f1ebb3e17bf648647422f0944c9a469d0fcf63e3b66fb9a83037e38b2c"}, +] [package.dependencies] Flask = "*" @@ -2247,25 +2957,93 @@ email = ["email-validator"] name = "flatbuffers" version = "23.5.26" description = "The FlatBuffers serialization format for Python" -category = "main" optional = true python-versions = "*" +files = [ + {file = "flatbuffers-23.5.26-py2.py3-none-any.whl", hash = "sha256:c0ff356da363087b915fde4b8b45bdda73432fc17cddb3c8157472eab1422ad1"}, + {file = "flatbuffers-23.5.26.tar.gz", hash = "sha256:9ea1144cac05ce5d86e2859f431c6cd5e66cd9c78c558317c7955fb8d4c78d89"}, +] [[package]] name = "frozenlist" version = "1.4.0" description = "A list-like structure which implements collections.abc.MutableSequence" -category = "main" optional = false python-versions = ">=3.8" +files = [ + {file = "frozenlist-1.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:764226ceef3125e53ea2cb275000e309c0aa5464d43bd72abd661e27fffc26ab"}, + {file = "frozenlist-1.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d6484756b12f40003c6128bfcc3fa9f0d49a687e171186c2d85ec82e3758c559"}, + {file = "frozenlist-1.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9ac08e601308e41eb533f232dbf6b7e4cea762f9f84f6357136eed926c15d12c"}, + {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d081f13b095d74b67d550de04df1c756831f3b83dc9881c38985834387487f1b"}, + {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:71932b597f9895f011f47f17d6428252fc728ba2ae6024e13c3398a087c2cdea"}, + {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:981b9ab5a0a3178ff413bca62526bb784249421c24ad7381e39d67981be2c326"}, + {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e41f3de4df3e80de75845d3e743b3f1c4c8613c3997a912dbf0229fc61a8b963"}, + {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6918d49b1f90821e93069682c06ffde41829c346c66b721e65a5c62b4bab0300"}, + {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0e5c8764c7829343d919cc2dfc587a8db01c4f70a4ebbc49abde5d4b158b007b"}, + {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8d0edd6b1c7fb94922bf569c9b092ee187a83f03fb1a63076e7774b60f9481a8"}, + {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e29cda763f752553fa14c68fb2195150bfab22b352572cb36c43c47bedba70eb"}, + {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:0c7c1b47859ee2cac3846fde1c1dc0f15da6cec5a0e5c72d101e0f83dcb67ff9"}, + {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:901289d524fdd571be1c7be054f48b1f88ce8dddcbdf1ec698b27d4b8b9e5d62"}, + {file = "frozenlist-1.4.0-cp310-cp310-win32.whl", hash = "sha256:1a0848b52815006ea6596c395f87449f693dc419061cc21e970f139d466dc0a0"}, + {file = "frozenlist-1.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:b206646d176a007466358aa21d85cd8600a415c67c9bd15403336c331a10d956"}, + {file = "frozenlist-1.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:de343e75f40e972bae1ef6090267f8260c1446a1695e77096db6cfa25e759a95"}, + {file = "frozenlist-1.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ad2a9eb6d9839ae241701d0918f54c51365a51407fd80f6b8289e2dfca977cc3"}, + {file = "frozenlist-1.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bd7bd3b3830247580de99c99ea2a01416dfc3c34471ca1298bccabf86d0ff4dc"}, + {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bdf1847068c362f16b353163391210269e4f0569a3c166bc6a9f74ccbfc7e839"}, + {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38461d02d66de17455072c9ba981d35f1d2a73024bee7790ac2f9e361ef1cd0c"}, + {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5a32087d720c608f42caed0ef36d2b3ea61a9d09ee59a5142d6070da9041b8f"}, + {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dd65632acaf0d47608190a71bfe46b209719bf2beb59507db08ccdbe712f969b"}, + {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:261b9f5d17cac914531331ff1b1d452125bf5daa05faf73b71d935485b0c510b"}, + {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b89ac9768b82205936771f8d2eb3ce88503b1556324c9f903e7156669f521472"}, + {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:008eb8b31b3ea6896da16c38c1b136cb9fec9e249e77f6211d479db79a4eaf01"}, + {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e74b0506fa5aa5598ac6a975a12aa8928cbb58e1f5ac8360792ef15de1aa848f"}, + {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:490132667476f6781b4c9458298b0c1cddf237488abd228b0b3650e5ecba7467"}, + {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:76d4711f6f6d08551a7e9ef28c722f4a50dd0fc204c56b4bcd95c6cc05ce6fbb"}, + {file = "frozenlist-1.4.0-cp311-cp311-win32.whl", hash = "sha256:a02eb8ab2b8f200179b5f62b59757685ae9987996ae549ccf30f983f40602431"}, + {file = "frozenlist-1.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:515e1abc578dd3b275d6a5114030b1330ba044ffba03f94091842852f806f1c1"}, + {file = "frozenlist-1.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:f0ed05f5079c708fe74bf9027e95125334b6978bf07fd5ab923e9e55e5fbb9d3"}, + {file = "frozenlist-1.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ca265542ca427bf97aed183c1676e2a9c66942e822b14dc6e5f42e038f92a503"}, + {file = "frozenlist-1.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:491e014f5c43656da08958808588cc6c016847b4360e327a62cb308c791bd2d9"}, + {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17ae5cd0f333f94f2e03aaf140bb762c64783935cc764ff9c82dff626089bebf"}, + {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e78fb68cf9c1a6aa4a9a12e960a5c9dfbdb89b3695197aa7064705662515de2"}, + {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5655a942f5f5d2c9ed93d72148226d75369b4f6952680211972a33e59b1dfdc"}, + {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c11b0746f5d946fecf750428a95f3e9ebe792c1ee3b1e96eeba145dc631a9672"}, + {file = 
"frozenlist-1.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e66d2a64d44d50d2543405fb183a21f76b3b5fd16f130f5c99187c3fb4e64919"}, + {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:88f7bc0fcca81f985f78dd0fa68d2c75abf8272b1f5c323ea4a01a4d7a614efc"}, + {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5833593c25ac59ede40ed4de6d67eb42928cca97f26feea219f21d0ed0959b79"}, + {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:fec520865f42e5c7f050c2a79038897b1c7d1595e907a9e08e3353293ffc948e"}, + {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:b826d97e4276750beca7c8f0f1a4938892697a6bcd8ec8217b3312dad6982781"}, + {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ceb6ec0a10c65540421e20ebd29083c50e6d1143278746a4ef6bcf6153171eb8"}, + {file = "frozenlist-1.4.0-cp38-cp38-win32.whl", hash = "sha256:2b8bcf994563466db019fab287ff390fffbfdb4f905fc77bc1c1d604b1c689cc"}, + {file = "frozenlist-1.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:a6c8097e01886188e5be3e6b14e94ab365f384736aa1fca6a0b9e35bd4a30bc7"}, + {file = "frozenlist-1.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:6c38721585f285203e4b4132a352eb3daa19121a035f3182e08e437cface44bf"}, + {file = "frozenlist-1.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a0c6da9aee33ff0b1a451e867da0c1f47408112b3391dd43133838339e410963"}, + {file = "frozenlist-1.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:93ea75c050c5bb3d98016b4ba2497851eadf0ac154d88a67d7a6816206f6fa7f"}, + {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f61e2dc5ad442c52b4887f1fdc112f97caeff4d9e6ebe78879364ac59f1663e1"}, + {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa384489fefeb62321b238e64c07ef48398fe80f9e1e6afeff22e140e0850eef"}, + {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:10ff5faaa22786315ef57097a279b833ecab1a0bfb07d604c9cbb1c4cdc2ed87"}, + {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:007df07a6e3eb3e33e9a1fe6a9db7af152bbd8a185f9aaa6ece10a3529e3e1c6"}, + {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f4f399d28478d1f604c2ff9119907af9726aed73680e5ed1ca634d377abb087"}, + {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c5374b80521d3d3f2ec5572e05adc94601985cc526fb276d0c8574a6d749f1b3"}, + {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ce31ae3e19f3c902de379cf1323d90c649425b86de7bbdf82871b8a2a0615f3d"}, + {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7211ef110a9194b6042449431e08c4d80c0481e5891e58d429df5899690511c2"}, + {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:556de4430ce324c836789fa4560ca62d1591d2538b8ceb0b4f68fb7b2384a27a"}, + {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7645a8e814a3ee34a89c4a372011dcd817964ce8cb273c8ed6119d706e9613e3"}, + {file = "frozenlist-1.4.0-cp39-cp39-win32.whl", hash = "sha256:19488c57c12d4e8095a922f328df3f179c820c212940a498623ed39160bc3c2f"}, + {file = "frozenlist-1.4.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:6221d84d463fb110bdd7619b69cb43878a11d51cbb9394ae3105d082d5199167"}, + {file = "frozenlist-1.4.0.tar.gz", hash = "sha256:09163bdf0b2907454042edb19f887c6d33806adc71fbd54afc14908bfdc22251"}, +] [[package]] name = "fsspec" version = "2024.2.0" description = "File-system specification" -category = "main" optional = false python-versions = ">=3.8" +files = [ + {file = "fsspec-2024.2.0-py3-none-any.whl", hash = "sha256:817f969556fa5916bc682e02ca2045f96ff7f586d45110fcb76022063ad2c7d8"}, + {file = "fsspec-2024.2.0.tar.gz", hash = "sha256:b6ad1a679f760dda52b1168c859d01b7b80648ea6f7f7c7f5a8a91dc3f3ecb84"}, +] [package.extras] abfs = ["adlfs"] @@ -2295,17 +3073,22 @@ tqdm = ["tqdm"] name = "future" version = "0.18.3" description = "Clean single-source support for Python 3 and 2" -category = "main" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "future-0.18.3.tar.gz", hash = "sha256:34a17436ed1e96697a86f9de3d15a3b0be01d8bc8de9c1dffd59fb8234ed5307"}, +] [[package]] name = "gcsfs" version = "2024.2.0" description = "Convenient Filesystem interface over GCS" -category = "main" optional = true python-versions = ">=3.8" +files = [ + {file = "gcsfs-2024.2.0-py2.py3-none-any.whl", hash = "sha256:20bf70cc81d580474dd299d55e1ffcf8b3e81721aeb562e148ca0a3c900d0421"}, + {file = "gcsfs-2024.2.0.tar.gz", hash = "sha256:f7cffd7cae2fb50c56ef883f8aef9792be045b5059f06c1902c3a6151509f506"}, +] [package.dependencies] aiohttp = "<4.0.0a0 || >4.0.0a0,<4.0.0a1 || >4.0.0a1" @@ -2324,9 +3107,12 @@ gcsfuse = ["fusepy"] name = "gitdb" version = "4.0.10" description = "Git Object Database" -category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "gitdb-4.0.10-py3-none-any.whl", hash = "sha256:c286cf298426064079ed96a9e4a9d39e7f3e9bf15ba60701e95f5492f28415c7"}, + {file = "gitdb-4.0.10.tar.gz", hash = "sha256:6eb990b69df4e15bad899ea868dc46572c3f75339735663b81de79b06f17eb9a"}, +] [package.dependencies] smmap = ">=3.0.1,<6" @@ -2335,9 +3121,12 @@ smmap = ">=3.0.1,<6" name = "gitpython" version = "3.1.34" description = "GitPython is a Python library used to interact with Git repositories" -category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "GitPython-3.1.34-py3-none-any.whl", hash = "sha256:5d3802b98a3bae1c2b8ae0e1ff2e4aa16bcdf02c145da34d092324f599f01395"}, + {file = "GitPython-3.1.34.tar.gz", hash = "sha256:85f7d365d1f6bf677ae51039c1ef67ca59091c7ebd5a3509aa399d4eda02d6dd"}, +] [package.dependencies] gitdb = ">=4.0.1,<5" @@ -2346,28 +3135,34 @@ gitdb = ">=4.0.1,<5" name = "giturlparse" version = "0.11.1" description = "A Git URL parsing module (supports parsing and rewriting)" -category = "main" optional = false python-versions = ">=3.8" +files = [ + {file = "giturlparse-0.11.1-py2.py3-none-any.whl", hash = "sha256:6422f25c8ca563e1a3cb6b85862e48614be804cd1334e6d84be5630eb26b343f"}, + {file = "giturlparse-0.11.1.tar.gz", hash = "sha256:cdbe0c062096c69e00f08397826dddebc1f73bc15b793994579c13aafc70c990"}, +] [[package]] name = "google-api-core" version = "2.11.1" description = "Google API client core library" -category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "google-api-core-2.11.1.tar.gz", hash = "sha256:25d29e05a0058ed5f19c61c0a78b1b53adea4d9364b464d014fbda941f6d1c9a"}, + {file = "google_api_core-2.11.1-py3-none-any.whl", hash = "sha256:d92a5a92dc36dd4f4b9ee4e55528a90e432b059f93aee6ad857f9de8cc7ae94a"}, +] [package.dependencies] google-auth = ">=2.14.1,<3.0.dev0" googleapis-common-protos = 
">=1.56.2,<2.0.dev0" grpcio = [ - {version = ">=1.33.2,<2.0dev", optional = true, markers = "extra == \"grpc\""}, - {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\""}, + {version = ">=1.33.2,<2.0dev", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, + {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, ] grpcio-status = [ - {version = ">=1.33.2,<2.0.dev0", optional = true, markers = "extra == \"grpc\""}, - {version = ">=1.49.1,<2.0.dev0", optional = true, markers = "python_version >= \"3.11\""}, + {version = ">=1.33.2,<2.0.dev0", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, + {version = ">=1.49.1,<2.0.dev0", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, ] protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" requests = ">=2.18.0,<3.0.0.dev0" @@ -2381,12 +3176,15 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] name = "google-api-python-client" version = "2.97.0" description = "Google API Client Library for Python" -category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "google-api-python-client-2.97.0.tar.gz", hash = "sha256:48277291894876a1ca7ed4127e055e81f81e6343ced1b544a7200ae2c119dcd7"}, + {file = "google_api_python_client-2.97.0-py2.py3-none-any.whl", hash = "sha256:5215f4cd577753fc4192ccfbe0bb8b55d4bb5fd68fa6268ac5cf271b6305de31"}, +] [package.dependencies] -google-api-core = ">=1.31.5,<2.0.0 || >2.3.0,<3.0.0.dev0" +google-api-core = ">=1.31.5,<2.0.dev0 || >2.3.0,<3.0.0.dev0" google-auth = ">=1.19.0,<3.0.0.dev0" google-auth-httplib2 = ">=0.1.0" httplib2 = ">=0.15.0,<1.dev0" @@ -2396,9 +3194,12 @@ uritemplate = ">=3.0.1,<5" name = "google-auth" version = "2.22.0" description = "Google Authentication Library" -category = "main" optional = false python-versions = ">=3.6" +files = [ + {file = "google-auth-2.22.0.tar.gz", hash = "sha256:164cba9af4e6e4e40c3a4f90a1a6c12ee56f14c0b4868d1ca91b32826ab334ce"}, + {file = "google_auth-2.22.0-py2.py3-none-any.whl", hash = "sha256:d61d1b40897407b574da67da1a833bdc10d5a11642566e506565d1b1a46ba873"}, +] [package.dependencies] cachetools = ">=2.0.0,<6.0" @@ -2418,9 +3219,12 @@ requests = ["requests (>=2.20.0,<3.0.0.dev0)"] name = "google-auth-httplib2" version = "0.1.0" description = "Google Authentication Library: httplib2 transport" -category = "dev" optional = false python-versions = "*" +files = [ + {file = "google-auth-httplib2-0.1.0.tar.gz", hash = "sha256:a07c39fd632becacd3f07718dfd6021bf396978f03ad3ce4321d060015cc30ac"}, + {file = "google_auth_httplib2-0.1.0-py2.py3-none-any.whl", hash = "sha256:31e49c36c6b5643b57e82617cb3e021e3e1d2df9da63af67252c02fa9c1f4a10"}, +] [package.dependencies] google-auth = "*" @@ -2431,9 +3235,12 @@ six = "*" name = "google-auth-oauthlib" version = "1.0.0" description = "Google Authentication Library" -category = "main" optional = false python-versions = ">=3.6" +files = [ + {file = "google-auth-oauthlib-1.0.0.tar.gz", hash = "sha256:e375064964820b47221a7e1b7ee1fd77051b6323c3f9e3e19785f78ab67ecfc5"}, + {file = "google_auth_oauthlib-1.0.0-py2.py3-none-any.whl", hash = "sha256:95880ca704928c300f48194d1770cf5b1462835b6e49db61445a520f793fd5fb"}, +] [package.dependencies] google-auth = ">=2.15.0" @@ -2446,12 +3253,15 @@ tool = ["click (>=6.0.0)"] name = 
"google-cloud-bigquery" version = "3.11.4" description = "Google BigQuery API client library" -category = "main" optional = true python-versions = ">=3.7" +files = [ + {file = "google-cloud-bigquery-3.11.4.tar.gz", hash = "sha256:697df117241a2283bcbb93b21e10badc14e51c9a90800d2a7e1a3e1c7d842974"}, + {file = "google_cloud_bigquery-3.11.4-py2.py3-none-any.whl", hash = "sha256:5fa7897743a0ed949ade25a0942fc9e7557d8fce307c6f8a76d1b604cf27f1b1"}, +] [package.dependencies] -google-api-core = {version = ">=1.31.5,<2.0.0 || >2.3.0,<3.0.0dev", extras = ["grpc"]} +google-api-core = {version = ">=1.31.5,<2.0.dev0 || >2.3.0,<3.0.0dev", extras = ["grpc"]} google-cloud-core = ">=1.6.0,<3.0.0dev" google-resumable-media = ">=0.6.0,<3.0dev" grpcio = [ @@ -2478,12 +3288,15 @@ tqdm = ["tqdm (>=4.7.4,<5.0.0dev)"] name = "google-cloud-core" version = "2.3.3" description = "Google Cloud API client core library" -category = "main" optional = true python-versions = ">=3.7" +files = [ + {file = "google-cloud-core-2.3.3.tar.gz", hash = "sha256:37b80273c8d7eee1ae816b3a20ae43585ea50506cb0e60f3cf5be5f87f1373cb"}, + {file = "google_cloud_core-2.3.3-py2.py3-none-any.whl", hash = "sha256:fbd11cad3e98a7e5b0343dc07cb1039a5ffd7a5bb96e1f1e27cee4bda4a90863"}, +] [package.dependencies] -google-api-core = ">=1.31.6,<2.0.0 || >2.3.0,<3.0.0dev" +google-api-core = ">=1.31.6,<2.0.dev0 || >2.3.0,<3.0.0dev" google-auth = ">=1.25.0,<3.0dev" [package.extras] @@ -2493,12 +3306,15 @@ grpc = ["grpcio (>=1.38.0,<2.0dev)"] name = "google-cloud-dataproc" version = "5.4.3" description = "Google Cloud Dataproc API client library" -category = "main" optional = true python-versions = ">=3.7" +files = [ + {file = "google-cloud-dataproc-5.4.3.tar.gz", hash = "sha256:d9c77c52aa5ddf52ae657736dbfb5312402933f72bab8480fc2d2afe98697402"}, + {file = "google_cloud_dataproc-5.4.3-py2.py3-none-any.whl", hash = "sha256:9cfff56cb53621cdffd0a3d6b10701e886e0a8ad54891e6c223eb67c0ff753ad"}, +] [package.dependencies] -google-api-core = {version = ">=1.34.0,<2.0.0 || >=2.11.0,<3.0.0dev", extras = ["grpc"]} +google-api-core = {version = ">=1.34.0,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} grpc-google-iam-v1 = ">=0.12.4,<1.0.0dev" proto-plus = [ {version = ">=1.22.0,<2.0.0dev", markers = "python_version < \"3.11\""}, @@ -2510,12 +3326,15 @@ protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4 name = "google-cloud-storage" version = "2.10.0" description = "Google Cloud Storage API client library" -category = "main" optional = true python-versions = ">=3.7" +files = [ + {file = "google-cloud-storage-2.10.0.tar.gz", hash = "sha256:934b31ead5f3994e5360f9ff5750982c5b6b11604dc072bc452c25965e076dc7"}, + {file = "google_cloud_storage-2.10.0-py2.py3-none-any.whl", hash = "sha256:9433cf28801671de1c80434238fb1e7e4a1ba3087470e90f70c928ea77c2b9d7"}, +] [package.dependencies] -google-api-core = ">=1.31.5,<2.0.0 || >2.3.0,<3.0.0dev" +google-api-core = ">=1.31.5,<2.0.dev0 || >2.3.0,<3.0.0dev" google-auth = ">=1.25.0,<3.0dev" google-cloud-core = ">=2.3.0,<3.0dev" google-resumable-media = ">=2.3.2" @@ -2528,68 +3347,136 @@ protobuf = ["protobuf (<5.0.0dev)"] name = "google-crc32c" version = "1.5.0" description = "A python wrapper of the C library 'Google CRC32C'" -category = "main" optional = true python-versions = ">=3.7" - -[package.extras] -testing = ["pytest"] - -[[package]] -name = "google-re2" -version = "1.1" -description = "RE2 Python bindings" -category = "dev" -optional = false -python-versions = "~=3.8" files = [ - {file = 
"google-re2-1.1.tar.gz", hash = "sha256:d3a9467ee52b46ac77ca928f6d0cbeaccfd92f03ca0f0f65b9df6a95184f3a1c"}, - {file = "google_re2-1.1-1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:874d2e36dfa506b03d4f9c4aef1701a65304f4004c96c7edac7d8aea08fe193e"}, - {file = "google_re2-1.1-1-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:b66eb84850afdce09aabca40bcd6f2a0e96178a1b4990d555678edb1f59bf255"}, - {file = "google_re2-1.1-1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:c461640a07db26dc2b51f43de607b7520e7debaf4f6a000f796a3c0196ca52af"}, - {file = "google_re2-1.1-1-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:7f9ba69eaee6e7a9f5ddfb919bf1a866af14a18b26a179e3fb1a6fe3d0cbf349"}, - {file = "google_re2-1.1-1-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:f95cf16739cc3ea63728366881221b119f2322b4b739b7da6522d45a68792cea"}, - {file = "google_re2-1.1-1-cp310-cp310-macosx_13_0_x86_64.whl", hash = "sha256:9fb56a41250191298e6a2859b0fdea1e83330c9870fe8d84e5836c506ae46e96"}, - {file = "google_re2-1.1-1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fb22ea995564d87baf4a4bfbb3ca024be913683a710f4f0dc9c94dc663afab20"}, - {file = "google_re2-1.1-1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:19b3f0bfbb2a2ca58ed0aaa9356d07a5c0921383a6dbeca086b2b74472f5ee08"}, - {file = "google_re2-1.1-1-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:34fd7f97b84af7453cf05b25adfe2491ba3cef1ca548ac2907efa63d3510954d"}, - {file = "google_re2-1.1-1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3e029664192d8d30f7c977706183ef483e82ca239302272df74e01d2e22897ca"}, - {file = "google_re2-1.1-1-cp310-cp310-win32.whl", hash = "sha256:41a8f222f9839d059d37efd28e4deec203502d7e39c3759d83d6a33deadf1d2e"}, - {file = "google_re2-1.1-1-cp310-cp310-win_amd64.whl", hash = "sha256:6141d569fdf72aa693f040ba05c469036587395af07ff419b9a3c009d6ffefd3"}, - {file = "google_re2-1.1-1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a2d03f6aaf22788ba13a770f0d183b8eebe55545bcbb6e4c41dcccac7ded014d"}, - {file = "google_re2-1.1-1-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:a98f15fd9c31bf80d368698447191a2e9703880b305dbf34d9a63ce634b8a557"}, - {file = "google_re2-1.1-1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:42128916cc2966623832aabbd224c88e862d1c531d6bc49ab141f565e6321a90"}, - {file = "google_re2-1.1-1-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:6e27986a166903ad7000635f6faed8ab5072d687f822ac9f692c40b2470aebcf"}, - {file = "google_re2-1.1-1-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:5e9edcd743a830d0c0b2729201e42ab86fceef8f4086df65563f482e4544359e"}, - {file = "google_re2-1.1-1-cp311-cp311-macosx_13_0_x86_64.whl", hash = "sha256:d33145bbfd32e916f1c911cd9225be5364a36c3959742a0cc4dfc0692d6a2a5e"}, - {file = "google_re2-1.1-1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8b27cc2544b69a357ab2a749dc0c13a1b9055198c56f4c2c3b0f61d693f8e203"}, - {file = "google_re2-1.1-1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3cdf8982b6def987e95b37984d0c1c878de32635dd78acde3273f730b69708c9"}, - {file = "google_re2-1.1-1-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:71ac661a7365e134741fe5542f13d7ce1e6187446b96ddee4c8b7d153fc8f05a"}, - {file = "google_re2-1.1-1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:35a902ba31a71a3e9e114e44473624d9aa9f9b85ec981bfa91671aefe0ef1a6c"}, - {file = 
"google_re2-1.1-1-cp311-cp311-win32.whl", hash = "sha256:9469f26b485da2784c658e687a766c72e1a17b1e63b3ed24b5f64c3d19fbae3d"}, - {file = "google_re2-1.1-1-cp311-cp311-win_amd64.whl", hash = "sha256:07dd0780240ee431781119b46c3bbf76f5cef24a2cbb542f6a08c643e0a68d98"}, - {file = "google_re2-1.1-1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9857dc4d69b8025057c8129e98406a24d51bdaf1b96e481dbba7e69e0ec85104"}, - {file = "google_re2-1.1-1-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:a6eaaa5f200022eb0bdded5949c91454fc96e1edd6f9e9a96dd1dc32c821c00e"}, - {file = "google_re2-1.1-1-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:a32bb2afe128d90b8edc20d4f7d297f7e2753206eba92937a57e5280736eac74"}, - {file = "google_re2-1.1-1-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:4f2754616c61b76ab4e5a4f39892a52a00897203b859c5abd7e3c630dd883cda"}, - {file = "google_re2-1.1-1-cp38-cp38-macosx_13_0_arm64.whl", hash = "sha256:b110f3d657e8f67a43a699d327ce47095b80180ea1118e2de44cb5c7002503d9"}, - {file = "google_re2-1.1-1-cp38-cp38-macosx_13_0_x86_64.whl", hash = "sha256:fd62ba2853eef65e249a9c4437a9ecac568222062bc956f0c61a3d1151a6271b"}, - {file = "google_re2-1.1-1-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:23b50eb74dc3e1d480b04b987c61242df5dade50d08bc16e25eb3582b83fca80"}, - {file = "google_re2-1.1-1-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e1bde89855dd5ab0811187d21eec149975510c80e865c771c883524a452445e7"}, - {file = "google_re2-1.1-1-cp38-cp38-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:10c6cddc720151a509beb98ab310fa0cc8bcb265f83518ebf831de2c9ff73af0"}, - {file = "google_re2-1.1-1-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9bea09c5e8401ec50b8f211bc820ec2f0ca5e744ac67431a1b39bdacbd266553"}, - {file = "google_re2-1.1-1-cp38-cp38-win32.whl", hash = "sha256:ffa51b118037518bcdf63c7649d0b4be7071982b83f48ee3bbabf24a9cb48f8a"}, - {file = "google_re2-1.1-1-cp38-cp38-win_amd64.whl", hash = "sha256:3b47715b6d43c9351957eb5092ad0fa625d04106d81f34cb8a726c53395ad474"}, - {file = "google_re2-1.1-1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:998f31bf7efbc9bb603d0c356c1c77e5331f689c71783df8e21e67bb025fc66a"}, - {file = "google_re2-1.1-1-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:0b5f0eaab859d3ba5f462c82bf37ab56e9d37e19b40b5898c731dbe4213a85f7"}, - {file = "google_re2-1.1-1-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:f6d591d9c4cbc7142b729ddcc3f654d059d8ebc3bc95891198808a4785a6b4d8"}, - {file = "google_re2-1.1-1-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:3c325c2eae197b423330a04ab62e2e1cf942676cd5560907db4d63e23ce0648a"}, - {file = "google_re2-1.1-1-cp39-cp39-macosx_13_0_arm64.whl", hash = "sha256:1e019e8f57955806ee843254ce454249b58800a6e872b2c8e9df2ef3459de0d5"}, - {file = "google_re2-1.1-1-cp39-cp39-macosx_13_0_x86_64.whl", hash = "sha256:58ebbcc7ad2b639768a6bca586357291660ea40dfac83039208e5055c357513b"}, - {file = "google_re2-1.1-1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:723f8553e7fc022294071f14fb7dfc7958c365dc7d4a71d4938ccd2df8c6eca4"}, - {file = "google_re2-1.1-1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d81512b08e6787fc8ef29fea365d3fdbf957553a625550e1d96c36877ae30355"}, - {file = "google_re2-1.1-1-cp39-cp39-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c58601b155651cc572a23ee2860788c77581aad85d3567a55b89b0674702f34d"}, - {file = 
"google_re2-1.1-1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9c6c9f64b9724ec38da8e514f404ac64e9a6a5e8b1d7031c2dadd05c1f4c16fd"}, + {file = "google-crc32c-1.5.0.tar.gz", hash = "sha256:89284716bc6a5a415d4eaa11b1726d2d60a0cd12aadf5439828353662ede9dd7"}, + {file = "google_crc32c-1.5.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:596d1f98fc70232fcb6590c439f43b350cb762fb5d61ce7b0e9db4539654cc13"}, + {file = "google_crc32c-1.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:be82c3c8cfb15b30f36768797a640e800513793d6ae1724aaaafe5bf86f8f346"}, + {file = "google_crc32c-1.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:461665ff58895f508e2866824a47bdee72497b091c730071f2b7575d5762ab65"}, + {file = "google_crc32c-1.5.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2096eddb4e7c7bdae4bd69ad364e55e07b8316653234a56552d9c988bd2d61b"}, + {file = "google_crc32c-1.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:116a7c3c616dd14a3de8c64a965828b197e5f2d121fedd2f8c5585c547e87b02"}, + {file = "google_crc32c-1.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5829b792bf5822fd0a6f6eb34c5f81dd074f01d570ed7f36aa101d6fc7a0a6e4"}, + {file = "google_crc32c-1.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:64e52e2b3970bd891309c113b54cf0e4384762c934d5ae56e283f9a0afcd953e"}, + {file = "google_crc32c-1.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:02ebb8bf46c13e36998aeaad1de9b48f4caf545e91d14041270d9dca767b780c"}, + {file = "google_crc32c-1.5.0-cp310-cp310-win32.whl", hash = "sha256:2e920d506ec85eb4ba50cd4228c2bec05642894d4c73c59b3a2fe20346bd00ee"}, + {file = "google_crc32c-1.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:07eb3c611ce363c51a933bf6bd7f8e3878a51d124acfc89452a75120bc436289"}, + {file = "google_crc32c-1.5.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:cae0274952c079886567f3f4f685bcaf5708f0a23a5f5216fdab71f81a6c0273"}, + {file = "google_crc32c-1.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1034d91442ead5a95b5aaef90dbfaca8633b0247d1e41621d1e9f9db88c36298"}, + {file = "google_crc32c-1.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c42c70cd1d362284289c6273adda4c6af8039a8ae12dc451dcd61cdabb8ab57"}, + {file = "google_crc32c-1.5.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8485b340a6a9e76c62a7dce3c98e5f102c9219f4cfbf896a00cf48caf078d438"}, + {file = "google_crc32c-1.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77e2fd3057c9d78e225fa0a2160f96b64a824de17840351b26825b0848022906"}, + {file = "google_crc32c-1.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183"}, + {file = "google_crc32c-1.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:a1fd716e7a01f8e717490fbe2e431d2905ab8aa598b9b12f8d10abebb36b04dd"}, + {file = "google_crc32c-1.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:72218785ce41b9cfd2fc1d6a017dc1ff7acfc4c17d01053265c41a2c0cc39b8c"}, + {file = "google_crc32c-1.5.0-cp311-cp311-win32.whl", hash = "sha256:66741ef4ee08ea0b2cc3c86916ab66b6aef03768525627fd6a1b34968b4e3709"}, + {file = "google_crc32c-1.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:ba1eb1843304b1e5537e1fca632fa894d6f6deca8d6389636ee5b4797affb968"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:98cb4d057f285bd80d8778ebc4fde6b4d509ac3f331758fb1528b733215443ae"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19e0a019d2c4dcc5e598cd4a4bc7b008546b0358bd322537c74ad47a5386884f"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02c65b9817512edc6a4ae7c7e987fea799d2e0ee40c53ec573a692bee24de876"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6ac08d24c1f16bd2bf5eca8eaf8304812f44af5cfe5062006ec676e7e1d50afc"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3359fc442a743e870f4588fcf5dcbc1bf929df1fad8fb9905cd94e5edb02e84c"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e986b206dae4476f41bcec1faa057851f3889503a70e1bdb2378d406223994a"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:de06adc872bcd8c2a4e0dc51250e9e65ef2ca91be023b9d13ebd67c2ba552e1e"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-win32.whl", hash = "sha256:d3515f198eaa2f0ed49f8819d5732d70698c3fa37384146079b3799b97667a94"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:67b741654b851abafb7bc625b6d1cdd520a379074e64b6a128e3b688c3c04740"}, + {file = "google_crc32c-1.5.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c02ec1c5856179f171e032a31d6f8bf84e5a75c45c33b2e20a3de353b266ebd8"}, + {file = "google_crc32c-1.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:edfedb64740750e1a3b16152620220f51d58ff1b4abceb339ca92e934775c27a"}, + {file = "google_crc32c-1.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84e6e8cd997930fc66d5bb4fde61e2b62ba19d62b7abd7a69920406f9ecca946"}, + {file = "google_crc32c-1.5.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a"}, + {file = "google_crc32c-1.5.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:998679bf62b7fb599d2878aa3ed06b9ce688b8974893e7223c60db155f26bd8d"}, + {file = "google_crc32c-1.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:83c681c526a3439b5cf94f7420471705bbf96262f49a6fe546a6db5f687a3d4a"}, + {file = "google_crc32c-1.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4c6fdd4fccbec90cc8a01fc00773fcd5fa28db683c116ee3cb35cd5da9ef6c37"}, + {file = "google_crc32c-1.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5ae44e10a8e3407dbe138984f21e536583f2bba1be9491239f942c2464ac0894"}, + {file = "google_crc32c-1.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:37933ec6e693e51a5b07505bd05de57eee12f3e8c32b07da7e73669398e6630a"}, + {file = "google_crc32c-1.5.0-cp38-cp38-win32.whl", hash = "sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4"}, + {file = "google_crc32c-1.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:74dea7751d98034887dbd821b7aae3e1d36eda111d6ca36c206c44478035709c"}, + {file = "google_crc32c-1.5.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c6c777a480337ac14f38564ac88ae82d4cd238bf293f0a22295b66eb89ffced7"}, + {file = "google_crc32c-1.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:759ce4851a4bb15ecabae28f4d2e18983c244eddd767f560165563bf9aefbc8d"}, + {file = 
"google_crc32c-1.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f13cae8cc389a440def0c8c52057f37359014ccbc9dc1f0827936bcd367c6100"}, + {file = "google_crc32c-1.5.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e560628513ed34759456a416bf86b54b2476c59144a9138165c9a1575801d0d9"}, + {file = "google_crc32c-1.5.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1674e4307fa3024fc897ca774e9c7562c957af85df55efe2988ed9056dc4e57"}, + {file = "google_crc32c-1.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:278d2ed7c16cfc075c91378c4f47924c0625f5fc84b2d50d921b18b7975bd210"}, + {file = "google_crc32c-1.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d5280312b9af0976231f9e317c20e4a61cd2f9629b7bfea6a693d1878a264ebd"}, + {file = "google_crc32c-1.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8b87e1a59c38f275c0e3676fc2ab6d59eccecfd460be267ac360cc31f7bcde96"}, + {file = "google_crc32c-1.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7c074fece789b5034b9b1404a1f8208fc2d4c6ce9decdd16e8220c5a793e6f61"}, + {file = "google_crc32c-1.5.0-cp39-cp39-win32.whl", hash = "sha256:7f57f14606cd1dd0f0de396e1e53824c371e9544a822648cd76c034d209b559c"}, + {file = "google_crc32c-1.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:a2355cba1f4ad8b6988a4ca3feed5bff33f6af2d7f134852cf279c2aebfde541"}, + {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f314013e7dcd5cf45ab1945d92e713eec788166262ae8deb2cfacd53def27325"}, + {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b747a674c20a67343cb61d43fdd9207ce5da6a99f629c6e2541aa0e89215bcd"}, + {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8f24ed114432de109aa9fd317278518a5af2d31ac2ea6b952b2f7782b43da091"}, + {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8667b48e7a7ef66afba2c81e1094ef526388d35b873966d8a9a447974ed9178"}, + {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:1c7abdac90433b09bad6c43a43af253e688c9cfc1c86d332aed13f9a7c7f65e2"}, + {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6f998db4e71b645350b9ac28a2167e6632c239963ca9da411523bb439c5c514d"}, + {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c99616c853bb585301df6de07ca2cadad344fd1ada6d62bb30aec05219c45d2"}, + {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ad40e31093a4af319dadf503b2467ccdc8f67c72e4bcba97f8c10cb078207b5"}, + {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd67cf24a553339d5062eff51013780a00d6f97a39ca062781d06b3a73b15462"}, + {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:398af5e3ba9cf768787eef45c803ff9614cc3e22a5b2f7d7ae116df8b11e3314"}, + {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b1f8133c9a275df5613a451e73f36c2aea4fe13c5c8997e22cf355ebd7bd0728"}, + {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ba053c5f50430a3fcfd36f75aff9caeba0440b2d076afdb79a318d6ca245f88"}, + {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:272d3892a1e1a2dbc39cc5cde96834c236d5327e2122d3aaa19f6614531bb6eb"}, + {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:635f5d4dd18758a1fbd1049a8e8d2fee4ffed124462d837d1a02a0e009c3ab31"}, + {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c672d99a345849301784604bfeaeba4db0c7aae50b95be04dd651fd2a7310b93"}, +] + +[package.extras] +testing = ["pytest"] + +[[package]] +name = "google-re2" +version = "1.1" +description = "RE2 Python bindings" +optional = false +python-versions = "~=3.8" +files = [ + {file = "google-re2-1.1.tar.gz", hash = "sha256:d3a9467ee52b46ac77ca928f6d0cbeaccfd92f03ca0f0f65b9df6a95184f3a1c"}, + {file = "google_re2-1.1-1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:874d2e36dfa506b03d4f9c4aef1701a65304f4004c96c7edac7d8aea08fe193e"}, + {file = "google_re2-1.1-1-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:b66eb84850afdce09aabca40bcd6f2a0e96178a1b4990d555678edb1f59bf255"}, + {file = "google_re2-1.1-1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:c461640a07db26dc2b51f43de607b7520e7debaf4f6a000f796a3c0196ca52af"}, + {file = "google_re2-1.1-1-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:7f9ba69eaee6e7a9f5ddfb919bf1a866af14a18b26a179e3fb1a6fe3d0cbf349"}, + {file = "google_re2-1.1-1-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:f95cf16739cc3ea63728366881221b119f2322b4b739b7da6522d45a68792cea"}, + {file = "google_re2-1.1-1-cp310-cp310-macosx_13_0_x86_64.whl", hash = "sha256:9fb56a41250191298e6a2859b0fdea1e83330c9870fe8d84e5836c506ae46e96"}, + {file = "google_re2-1.1-1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fb22ea995564d87baf4a4bfbb3ca024be913683a710f4f0dc9c94dc663afab20"}, + {file = "google_re2-1.1-1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:19b3f0bfbb2a2ca58ed0aaa9356d07a5c0921383a6dbeca086b2b74472f5ee08"}, + {file = "google_re2-1.1-1-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:34fd7f97b84af7453cf05b25adfe2491ba3cef1ca548ac2907efa63d3510954d"}, + {file = "google_re2-1.1-1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3e029664192d8d30f7c977706183ef483e82ca239302272df74e01d2e22897ca"}, + {file = "google_re2-1.1-1-cp310-cp310-win32.whl", hash = "sha256:41a8f222f9839d059d37efd28e4deec203502d7e39c3759d83d6a33deadf1d2e"}, + {file = "google_re2-1.1-1-cp310-cp310-win_amd64.whl", hash = "sha256:6141d569fdf72aa693f040ba05c469036587395af07ff419b9a3c009d6ffefd3"}, + {file = "google_re2-1.1-1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a2d03f6aaf22788ba13a770f0d183b8eebe55545bcbb6e4c41dcccac7ded014d"}, + {file = "google_re2-1.1-1-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:a98f15fd9c31bf80d368698447191a2e9703880b305dbf34d9a63ce634b8a557"}, + {file = "google_re2-1.1-1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:42128916cc2966623832aabbd224c88e862d1c531d6bc49ab141f565e6321a90"}, + {file = "google_re2-1.1-1-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:6e27986a166903ad7000635f6faed8ab5072d687f822ac9f692c40b2470aebcf"}, + {file = "google_re2-1.1-1-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:5e9edcd743a830d0c0b2729201e42ab86fceef8f4086df65563f482e4544359e"}, + {file = "google_re2-1.1-1-cp311-cp311-macosx_13_0_x86_64.whl", hash = "sha256:d33145bbfd32e916f1c911cd9225be5364a36c3959742a0cc4dfc0692d6a2a5e"}, + {file = "google_re2-1.1-1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = 
"sha256:8b27cc2544b69a357ab2a749dc0c13a1b9055198c56f4c2c3b0f61d693f8e203"}, + {file = "google_re2-1.1-1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3cdf8982b6def987e95b37984d0c1c878de32635dd78acde3273f730b69708c9"}, + {file = "google_re2-1.1-1-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:71ac661a7365e134741fe5542f13d7ce1e6187446b96ddee4c8b7d153fc8f05a"}, + {file = "google_re2-1.1-1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:35a902ba31a71a3e9e114e44473624d9aa9f9b85ec981bfa91671aefe0ef1a6c"}, + {file = "google_re2-1.1-1-cp311-cp311-win32.whl", hash = "sha256:9469f26b485da2784c658e687a766c72e1a17b1e63b3ed24b5f64c3d19fbae3d"}, + {file = "google_re2-1.1-1-cp311-cp311-win_amd64.whl", hash = "sha256:07dd0780240ee431781119b46c3bbf76f5cef24a2cbb542f6a08c643e0a68d98"}, + {file = "google_re2-1.1-1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9857dc4d69b8025057c8129e98406a24d51bdaf1b96e481dbba7e69e0ec85104"}, + {file = "google_re2-1.1-1-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:a6eaaa5f200022eb0bdded5949c91454fc96e1edd6f9e9a96dd1dc32c821c00e"}, + {file = "google_re2-1.1-1-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:a32bb2afe128d90b8edc20d4f7d297f7e2753206eba92937a57e5280736eac74"}, + {file = "google_re2-1.1-1-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:4f2754616c61b76ab4e5a4f39892a52a00897203b859c5abd7e3c630dd883cda"}, + {file = "google_re2-1.1-1-cp38-cp38-macosx_13_0_arm64.whl", hash = "sha256:b110f3d657e8f67a43a699d327ce47095b80180ea1118e2de44cb5c7002503d9"}, + {file = "google_re2-1.1-1-cp38-cp38-macosx_13_0_x86_64.whl", hash = "sha256:fd62ba2853eef65e249a9c4437a9ecac568222062bc956f0c61a3d1151a6271b"}, + {file = "google_re2-1.1-1-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:23b50eb74dc3e1d480b04b987c61242df5dade50d08bc16e25eb3582b83fca80"}, + {file = "google_re2-1.1-1-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e1bde89855dd5ab0811187d21eec149975510c80e865c771c883524a452445e7"}, + {file = "google_re2-1.1-1-cp38-cp38-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:10c6cddc720151a509beb98ab310fa0cc8bcb265f83518ebf831de2c9ff73af0"}, + {file = "google_re2-1.1-1-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9bea09c5e8401ec50b8f211bc820ec2f0ca5e744ac67431a1b39bdacbd266553"}, + {file = "google_re2-1.1-1-cp38-cp38-win32.whl", hash = "sha256:ffa51b118037518bcdf63c7649d0b4be7071982b83f48ee3bbabf24a9cb48f8a"}, + {file = "google_re2-1.1-1-cp38-cp38-win_amd64.whl", hash = "sha256:3b47715b6d43c9351957eb5092ad0fa625d04106d81f34cb8a726c53395ad474"}, + {file = "google_re2-1.1-1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:998f31bf7efbc9bb603d0c356c1c77e5331f689c71783df8e21e67bb025fc66a"}, + {file = "google_re2-1.1-1-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:0b5f0eaab859d3ba5f462c82bf37ab56e9d37e19b40b5898c731dbe4213a85f7"}, + {file = "google_re2-1.1-1-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:f6d591d9c4cbc7142b729ddcc3f654d059d8ebc3bc95891198808a4785a6b4d8"}, + {file = "google_re2-1.1-1-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:3c325c2eae197b423330a04ab62e2e1cf942676cd5560907db4d63e23ce0648a"}, + {file = "google_re2-1.1-1-cp39-cp39-macosx_13_0_arm64.whl", hash = "sha256:1e019e8f57955806ee843254ce454249b58800a6e872b2c8e9df2ef3459de0d5"}, + {file = "google_re2-1.1-1-cp39-cp39-macosx_13_0_x86_64.whl", hash = 
"sha256:58ebbcc7ad2b639768a6bca586357291660ea40dfac83039208e5055c357513b"}, + {file = "google_re2-1.1-1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:723f8553e7fc022294071f14fb7dfc7958c365dc7d4a71d4938ccd2df8c6eca4"}, + {file = "google_re2-1.1-1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d81512b08e6787fc8ef29fea365d3fdbf957553a625550e1d96c36877ae30355"}, + {file = "google_re2-1.1-1-cp39-cp39-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c58601b155651cc572a23ee2860788c77581aad85d3567a55b89b0674702f34d"}, + {file = "google_re2-1.1-1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9c6c9f64b9724ec38da8e514f404ac64e9a6a5e8b1d7031c2dadd05c1f4c16fd"}, {file = "google_re2-1.1-1-cp39-cp39-win32.whl", hash = "sha256:d1b751b9ab9f8e2ab2a36d72b909281ce65f328c9115a1685acae1a2d1afd7a4"}, {file = "google_re2-1.1-1-cp39-cp39-win_amd64.whl", hash = "sha256:ac775c75cec7069351d201da4e0fb0cae4c1c5ebecd08fa34e1be89740c1d80b"}, {file = "google_re2-1.1-2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5eaefe4705b75ca5f78178a50104b689e9282f868e12f119b26b4cffc0c7ee6e"}, @@ -2706,9 +3593,12 @@ files = [ name = "google-resumable-media" version = "2.5.0" description = "Utilities for Google Media Downloads and Resumable Uploads" -category = "main" optional = true python-versions = ">= 3.7" +files = [ + {file = "google-resumable-media-2.5.0.tar.gz", hash = "sha256:218931e8e2b2a73a58eb354a288e03a0fd5fb1c4583261ac6e4c078666468c93"}, + {file = "google_resumable_media-2.5.0-py2.py3-none-any.whl", hash = "sha256:da1bd943e2e114a56d85d6848497ebf9be6a14d3db23e9fc57581e7c3e8170ec"}, +] [package.dependencies] google-crc32c = ">=1.0,<2.0dev" @@ -2721,9 +3611,12 @@ requests = ["requests (>=2.18.0,<3.0.0dev)"] name = "googleapis-common-protos" version = "1.60.0" description = "Common protobufs used in Google APIs" -category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "googleapis-common-protos-1.60.0.tar.gz", hash = "sha256:e73ebb404098db405ba95d1e1ae0aa91c3e15a71da031a2eeb6b2e23e7bc3708"}, + {file = "googleapis_common_protos-1.60.0-py2.py3-none-any.whl", hash = "sha256:69f9bbcc6acde92cab2db95ce30a70bd2b81d20b12eff3f1aabaffcbe8a93918"}, +] [package.dependencies] grpcio = {version = ">=1.44.0,<2.0.0.dev0", optional = true, markers = "extra == \"grpc\""} @@ -2736,9 +3629,11 @@ grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] name = "grapheme" version = "0.6.0" description = "Unicode grapheme helpers" -category = "dev" optional = false python-versions = "*" +files = [ + {file = "grapheme-0.6.0.tar.gz", hash = "sha256:44c2b9f21bbe77cfb05835fec230bd435954275267fea1858013b102f8603cca"}, +] [package.extras] test = ["pytest", "sphinx", "sphinx-autobuild", "twine", "wheel"] @@ -2747,9 +3642,68 @@ test = ["pytest", "sphinx", "sphinx-autobuild", "twine", "wheel"] name = "greenlet" version = "3.0.3" description = "Lightweight in-process concurrent programming" -category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, + {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, + {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, + {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, + {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, + {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, + {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, + {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, + {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"}, + {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"}, + {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"}, + {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"}, + {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"}, + {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"}, + {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"}, + {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"}, + {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"}, + {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"}, + {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = 
"sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, + {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, + {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, + {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, + {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, + {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, +] [package.extras] docs = ["Sphinx", "furo"] @@ -2759,9 +3713,12 @@ test = ["objgraph", "psutil"] name = "grpc-google-iam-v1" version = "0.12.6" description = "IAM API client library" -category = "main" optional = true python-versions = ">=3.7" +files = [ + {file = "grpc-google-iam-v1-0.12.6.tar.gz", hash = "sha256:2bc4b8fdf22115a65d751c9317329322602c39b7c86a289c9b72d228d960ef5f"}, + {file = "grpc_google_iam_v1-0.12.6-py2.py3-none-any.whl", hash = "sha256:5c10f3d8dc2d88678ab1a9b0cb5482735c5efee71e6c0cd59f872eef22913f5c"}, +] [package.dependencies] googleapis-common-protos = {version = ">=1.56.0,<2.0.0dev", extras = ["grpc"]} @@ -2772,9 +3729,55 @@ protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4 name = "grpcio" version = "1.57.0" description = "HTTP/2-based RPC framework" -category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "grpcio-1.57.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:092fa155b945015754bdf988be47793c377b52b88d546e45c6a9f9579ac7f7b6"}, + {file = "grpcio-1.57.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:2f7349786da979a94690cc5c2b804cab4e8774a3cf59be40d037c4342c906649"}, + {file = "grpcio-1.57.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:82640e57fb86ea1d71ea9ab54f7e942502cf98a429a200b2e743d8672171734f"}, + {file = "grpcio-1.57.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40b72effd4c789de94ce1be2b5f88d7b9b5f7379fe9645f198854112a6567d9a"}, + {file = "grpcio-1.57.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f708a6a17868ad8bf586598bee69abded4996b18adf26fd2d91191383b79019"}, + {file = "grpcio-1.57.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:60fe15288a0a65d5c1cb5b4a62b1850d07336e3ba728257a810317be14f0c527"}, + {file = "grpcio-1.57.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6907b1cf8bb29b058081d2aad677b15757a44ef2d4d8d9130271d2ad5e33efca"}, + 
{file = "grpcio-1.57.0-cp310-cp310-win32.whl", hash = "sha256:57b183e8b252825c4dd29114d6c13559be95387aafc10a7be645462a0fc98bbb"}, + {file = "grpcio-1.57.0-cp310-cp310-win_amd64.whl", hash = "sha256:7b400807fa749a9eb286e2cd893e501b110b4d356a218426cb9c825a0474ca56"}, + {file = "grpcio-1.57.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:c6ebecfb7a31385393203eb04ed8b6a08f5002f53df3d59e5e795edb80999652"}, + {file = "grpcio-1.57.0-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:00258cbe3f5188629828363ae8ff78477ce976a6f63fb2bb5e90088396faa82e"}, + {file = "grpcio-1.57.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:23e7d8849a0e58b806253fd206ac105b328171e01b8f18c7d5922274958cc87e"}, + {file = "grpcio-1.57.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5371bcd861e679d63b8274f73ac281751d34bd54eccdbfcd6aa00e692a82cd7b"}, + {file = "grpcio-1.57.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aed90d93b731929e742967e236f842a4a2174dc5db077c8f9ad2c5996f89f63e"}, + {file = "grpcio-1.57.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:fe752639919aad9ffb0dee0d87f29a6467d1ef764f13c4644d212a9a853a078d"}, + {file = "grpcio-1.57.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fada6b07ec4f0befe05218181f4b85176f11d531911b64c715d1875c4736d73a"}, + {file = "grpcio-1.57.0-cp311-cp311-win32.whl", hash = "sha256:bb396952cfa7ad2f01061fbc7dc1ad91dd9d69243bcb8110cf4e36924785a0fe"}, + {file = "grpcio-1.57.0-cp311-cp311-win_amd64.whl", hash = "sha256:e503cb45ed12b924b5b988ba9576dc9949b2f5283b8e33b21dcb6be74a7c58d0"}, + {file = "grpcio-1.57.0-cp37-cp37m-linux_armv7l.whl", hash = "sha256:fd173b4cf02b20f60860dc2ffe30115c18972d7d6d2d69df97ac38dee03be5bf"}, + {file = "grpcio-1.57.0-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:d7f8df114d6b4cf5a916b98389aeaf1e3132035420a88beea4e3d977e5f267a5"}, + {file = "grpcio-1.57.0-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:76c44efa4ede1f42a9d5b2fed1fe9377e73a109bef8675fb0728eb80b0b8e8f2"}, + {file = "grpcio-1.57.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4faea2cfdf762a664ab90589b66f416274887641ae17817de510b8178356bf73"}, + {file = "grpcio-1.57.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c60b83c43faeb6d0a9831f0351d7787a0753f5087cc6fa218d78fdf38e5acef0"}, + {file = "grpcio-1.57.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b363bbb5253e5f9c23d8a0a034dfdf1b7c9e7f12e602fc788c435171e96daccc"}, + {file = "grpcio-1.57.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:f1fb0fd4a1e9b11ac21c30c169d169ef434c6e9344ee0ab27cfa6f605f6387b2"}, + {file = "grpcio-1.57.0-cp37-cp37m-win_amd64.whl", hash = "sha256:34950353539e7d93f61c6796a007c705d663f3be41166358e3d88c45760c7d98"}, + {file = "grpcio-1.57.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:871f9999e0211f9551f368612460442a5436d9444606184652117d6a688c9f51"}, + {file = "grpcio-1.57.0-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:a8a8e560e8dbbdf29288872e91efd22af71e88b0e5736b0daf7773c1fecd99f0"}, + {file = "grpcio-1.57.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:2313b124e475aa9017a9844bdc5eafb2d5abdda9d456af16fc4535408c7d6da6"}, + {file = "grpcio-1.57.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b4098b6b638d9e0ca839a81656a2fd4bc26c9486ea707e8b1437d6f9d61c3941"}, + {file = "grpcio-1.57.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e5b58e32ae14658085c16986d11e99abd002ddbf51c8daae8a0671fffb3467f"}, + 
{file = "grpcio-1.57.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0f80bf37f09e1caba6a8063e56e2b87fa335add314cf2b78ebf7cb45aa7e3d06"}, + {file = "grpcio-1.57.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5b7a4ce8f862fe32b2a10b57752cf3169f5fe2915acfe7e6a1e155db3da99e79"}, + {file = "grpcio-1.57.0-cp38-cp38-win32.whl", hash = "sha256:9338bacf172e942e62e5889b6364e56657fbf8ac68062e8b25c48843e7b202bb"}, + {file = "grpcio-1.57.0-cp38-cp38-win_amd64.whl", hash = "sha256:e1cb52fa2d67d7f7fab310b600f22ce1ff04d562d46e9e0ac3e3403c2bb4cc16"}, + {file = "grpcio-1.57.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:fee387d2fab144e8a34e0e9c5ca0f45c9376b99de45628265cfa9886b1dbe62b"}, + {file = "grpcio-1.57.0-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:b53333627283e7241fcc217323f225c37783b5f0472316edcaa4479a213abfa6"}, + {file = "grpcio-1.57.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:f19ac6ac0a256cf77d3cc926ef0b4e64a9725cc612f97228cd5dc4bd9dbab03b"}, + {file = "grpcio-1.57.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e3fdf04e402f12e1de8074458549337febb3b45f21076cc02ef4ff786aff687e"}, + {file = "grpcio-1.57.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5613a2fecc82f95d6c51d15b9a72705553aa0d7c932fad7aed7afb51dc982ee5"}, + {file = "grpcio-1.57.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:b670c2faa92124b7397b42303e4d8eb64a4cd0b7a77e35a9e865a55d61c57ef9"}, + {file = "grpcio-1.57.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7a635589201b18510ff988161b7b573f50c6a48fae9cb567657920ca82022b37"}, + {file = "grpcio-1.57.0-cp39-cp39-win32.whl", hash = "sha256:d78d8b86fcdfa1e4c21f8896614b6cc7ee01a2a758ec0c4382d662f2a62cf766"}, + {file = "grpcio-1.57.0-cp39-cp39-win_amd64.whl", hash = "sha256:20ec6fc4ad47d1b6e12deec5045ec3cd5402d9a1597f738263e98f490fe07056"}, + {file = "grpcio-1.57.0.tar.gz", hash = "sha256:4b089f7ad1eb00a104078bab8015b0ed0ebcb3b589e527ab009c53893fd4e613"}, +] [package.extras] protobuf = ["grpcio-tools (>=1.57.0)"] @@ -2783,9 +3786,12 @@ protobuf = ["grpcio-tools (>=1.57.0)"] name = "grpcio-status" version = "1.57.0" description = "Status proto mapping for gRPC" -category = "main" optional = true python-versions = ">=3.6" +files = [ + {file = "grpcio-status-1.57.0.tar.gz", hash = "sha256:b098da99df1eebe58337f8f78e50df990273ccacc1226fddeb47c590e3df9e02"}, + {file = "grpcio_status-1.57.0-py3-none-any.whl", hash = "sha256:15d6af055914ebbc4ed17e55ebfb8e6bb17a45a57fea32e6af19978fb7844690"}, +] [package.dependencies] googleapis-common-protos = ">=1.5.5" @@ -2796,12 +3802,58 @@ protobuf = ">=4.21.6" name = "grpcio-tools" version = "1.57.0" description = "Protobuf code generator for gRPC" -category = "main" optional = true python-versions = ">=3.7" - -[package.dependencies] -grpcio = ">=1.57.0" +files = [ + {file = "grpcio-tools-1.57.0.tar.gz", hash = "sha256:2f16130d869ce27ecd623194547b649dd657333ec7e8644cc571c645781a9b85"}, + {file = "grpcio_tools-1.57.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:4fb8a8468031f858381a576078924af364a08833d8f8f3237018252c4573a802"}, + {file = "grpcio_tools-1.57.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:35bf0dad8a3562043345236c26d0053a856fb06c04d7da652f2ded914e508ae7"}, + {file = "grpcio_tools-1.57.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:ec9aab2fb6783c7fc54bc28f58eb75f1ca77594e6b0fd5e5e7a8114a95169fe0"}, + {file = "grpcio_tools-1.57.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:0cf5fc0a1c23f8ea34b408b72fb0e90eec0f404ad4dba98e8f6da3c9ce34e2ed"}, + {file = "grpcio_tools-1.57.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26e69d08a515554e0cfe1ec4d31568836f4b17f0ff82294f957f629388629eb9"}, + {file = "grpcio_tools-1.57.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c39a3656576b6fdaaf28abe0467f7a7231df4230c1bee132322dbc3209419e7f"}, + {file = "grpcio_tools-1.57.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f64f8ab22d27d4a5693310748d35a696061c3b5c7b8c4fb4ab3b4bc1068b6b56"}, + {file = "grpcio_tools-1.57.0-cp310-cp310-win32.whl", hash = "sha256:d2a134756f4db34759a5cc7f7e43f7eb87540b68d1cca62925593c6fb93924f7"}, + {file = "grpcio_tools-1.57.0-cp310-cp310-win_amd64.whl", hash = "sha256:9a3d60fb8d46ede26c1907c146561b3a9caa20a7aff961bc661ef8226f85a2e9"}, + {file = "grpcio_tools-1.57.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:aac98ecad8f7bd4301855669d42a5d97ef7bb34bec2b1e74c7a0641d47e313cf"}, + {file = "grpcio_tools-1.57.0-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:cdd020cb68b51462983b7c2dfbc3eb6ede032b8bf438d4554df0c3f08ce35c76"}, + {file = "grpcio_tools-1.57.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:f54081b08419a39221cd646363b5708857c696b3ad4784f1dcf310891e33a5f7"}, + {file = "grpcio_tools-1.57.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed85a0291fff45b67f2557fe7f117d3bc7af8b54b8619d27bf374b5c8b7e3ca2"}, + {file = "grpcio_tools-1.57.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e868cd6feb3ef07d4b35be104fe1fd0657db05259ff8f8ec5e08f4f89ca1191d"}, + {file = "grpcio_tools-1.57.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:dfb6f6120587b8e228a3cae5ee4985b5bdc18501bad05c49df61965dfc9d70a9"}, + {file = "grpcio_tools-1.57.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4a7ad7f328e28fc97c356d0f10fb10d8b5151bb65aa7cf14bf8084513f0b7306"}, + {file = "grpcio_tools-1.57.0-cp311-cp311-win32.whl", hash = "sha256:9867f2817b1a0c93c523f89ac6c9d8625548af4620a7ce438bf5a76e23327284"}, + {file = "grpcio_tools-1.57.0-cp311-cp311-win_amd64.whl", hash = "sha256:1f9e917a9f18087f6c14b4d4508fb94fca5c2f96852363a89232fb9b2124ac1f"}, + {file = "grpcio_tools-1.57.0-cp37-cp37m-linux_armv7l.whl", hash = "sha256:9f2aefa8a37bd2c4db1a3f1aca11377e2766214520fb70e67071f4ff8d8b0fa5"}, + {file = "grpcio_tools-1.57.0-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:850cbda0ec5d24c39e7215ede410276040692ca45d105fbbeada407fa03f0ac0"}, + {file = "grpcio_tools-1.57.0-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:6fa52972c9647876ea35f6dc2b51002a74ed900ec7894586cbb2fe76f64f99de"}, + {file = "grpcio_tools-1.57.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0eea89d7542719594e50e2283f51a072978b953e8b3e9fd7c59a2c762d4c1"}, + {file = "grpcio_tools-1.57.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3da5240211252fc70a6451fe00c143e2ab2f7bfc2445695ad2ed056b8e48d96"}, + {file = "grpcio_tools-1.57.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a0256f8786ac9e4db618a1aa492bb3472569a0946fd3ee862ffe23196323da55"}, + {file = "grpcio_tools-1.57.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c026bdf5c1366ce88b7bbe2d8207374d675afd3fd911f60752103de3da4a41d2"}, + {file = "grpcio_tools-1.57.0-cp37-cp37m-win_amd64.whl", hash = "sha256:9053c2f655589545be08b9d6a673e92970173a4bf11a4b9f18cd6e9af626b587"}, + {file = "grpcio_tools-1.57.0-cp38-cp38-linux_armv7l.whl", hash = 
"sha256:81ec4dbb696e095057b2528d11a8da04be6bbe2b967fa07d4ea9ba6354338cbf"}, + {file = "grpcio_tools-1.57.0-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:495e2946406963e0b9f063f76d5af0f2a19517dac2b367b5b044432ac9194296"}, + {file = "grpcio_tools-1.57.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:7b46fc6aa8eb7edd18cafcd21fd98703cb6c09e46b507de335fca7f0161dfccb"}, + {file = "grpcio_tools-1.57.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb81ff861692111fa81bd85f64584e624cb4013bd66fbce8a209b8893f5ce398"}, + {file = "grpcio_tools-1.57.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a42dc220eb5305f470855c9284f4c8e85ae59d6d742cd07946b0cbe5e9ca186"}, + {file = "grpcio_tools-1.57.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:90d10d9038ba46a595a223a34f136c9230e3d6d7abc2433dbf0e1c31939d3a8b"}, + {file = "grpcio_tools-1.57.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5bc3e6d338aefb052e19cedabe00452be46d0c10a4ed29ee77abb00402e438fe"}, + {file = "grpcio_tools-1.57.0-cp38-cp38-win32.whl", hash = "sha256:34b36217b17b5bea674a414229913e1fd80ede328be51e1b531fcc62abd393b0"}, + {file = "grpcio_tools-1.57.0-cp38-cp38-win_amd64.whl", hash = "sha256:dbde4004a0688400036342ff73e3706e8940483e2871547b1354d59e93a38277"}, + {file = "grpcio_tools-1.57.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:784574709b9690dc28696617ea69352e2132352fdfc9bc89afa8e39f99ae538e"}, + {file = "grpcio_tools-1.57.0-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:85ac4e62eb44428cde025fd9ab7554002315fc7880f791c553fc5a0015cc9931"}, + {file = "grpcio_tools-1.57.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:dc771d4db5701f280957bbcee91745e0686d00ed1c6aa7e05ba30a58b02d70a1"}, + {file = "grpcio_tools-1.57.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3ac06703c412f8167a9062eaf6099409967e33bf98fa5b02be4b4689b6bdf39"}, + {file = "grpcio_tools-1.57.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02d78c034109f46032c7217260066d49d41e6bcaf588fa28fa40fe2f83445347"}, + {file = "grpcio_tools-1.57.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2db25f15ed44327f2e02d0c4fe741ac966f9500e407047d8a7c7fccf2df65616"}, + {file = "grpcio_tools-1.57.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2b417c97936d94874a3ce7ed8deab910f2233e3612134507cfee4af8735c38a6"}, + {file = "grpcio_tools-1.57.0-cp39-cp39-win32.whl", hash = "sha256:f717cce5093e6b6049d9ea6d12fdf3658efdb1a80772f7737db1f8510b876df6"}, + {file = "grpcio_tools-1.57.0-cp39-cp39-win_amd64.whl", hash = "sha256:1c0e8a1a32973a5d59fbcc19232f925e5c48116e9411f788033a31c5ca5130b4"}, +] + +[package.dependencies] +grpcio = ">=1.57.0" protobuf = ">=4.21.6,<5.0dev" setuptools = "*" @@ -2809,9 +3861,12 @@ setuptools = "*" name = "gunicorn" version = "21.2.0" description = "WSGI HTTP Server for UNIX" -category = "dev" optional = false python-versions = ">=3.5" +files = [ + {file = "gunicorn-21.2.0-py3-none-any.whl", hash = "sha256:3213aa5e8c24949e792bcacfc176fef362e7aac80b76c56f6b5122bf350722f0"}, + {file = "gunicorn-21.2.0.tar.gz", hash = "sha256:88ec8bff1d634f98e61b9f65bc4bf3cd918a90806c6f5c48bc5603849ec81033"}, +] [package.dependencies] packaging = "*" @@ -2826,17 +3881,23 @@ tornado = ["tornado (>=0.2)"] name = "h11" version = "0.14.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" -category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = 
"sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] [[package]] name = "h2" version = "4.1.0" description = "HTTP/2 State-Machine based protocol implementation" -category = "main" optional = true python-versions = ">=3.6.1" +files = [ + {file = "h2-4.1.0-py3-none-any.whl", hash = "sha256:03a46bcf682256c95b5fd9e9a99c1323584c3eec6440d379b9903d709476bc6d"}, + {file = "h2-4.1.0.tar.gz", hash = "sha256:a83aca08fbe7aacb79fec788c9c0bac936343560ed9ec18b82a13a12c28d2abb"}, +] [package.dependencies] hpack = ">=4.0,<5" @@ -2846,9 +3907,12 @@ hyperframe = ">=6.0,<7" name = "hexbytes" version = "0.3.1" description = "hexbytes: Python `bytes` subclass that decodes hex, with a readable console output" -category = "main" optional = false python-versions = ">=3.7, <4" +files = [ + {file = "hexbytes-0.3.1-py3-none-any.whl", hash = "sha256:383595ad75026cf00abd570f44b368c6cdac0c6becfae5c39ff88829877f8a59"}, + {file = "hexbytes-0.3.1.tar.gz", hash = "sha256:a3fe35c6831ee8fafd048c4c086b986075fc14fd46258fa24ecb8d65745f9a9d"}, +] [package.extras] dev = ["black (>=22)", "bumpversion (>=0.5.3)", "eth-utils (>=1.0.1,<3)", "flake8 (==6.0.0)", "flake8-bugbear (==23.3.23)", "hypothesis (>=3.44.24,<=6.31.6)", "ipython", "isort (>=5.10.1)", "mypy (==0.971)", "pydocstyle (>=5.0.0)", "pytest (>=7.0.0)", "pytest-watch (>=4.1.0)", "pytest-xdist (>=2.4.0)", "sphinx (>=5.0.0)", "sphinx-rtd-theme (>=1.0.0)", "towncrier (>=21,<22)", "tox (>=4.0.0)", "twine", "wheel"] @@ -2860,35 +3924,44 @@ test = ["eth-utils (>=1.0.1,<3)", "hypothesis (>=3.44.24,<=6.31.6)", "pytest (>= name = "hpack" version = "4.0.0" description = "Pure-Python HPACK header compression" -category = "main" optional = true python-versions = ">=3.6.1" +files = [ + {file = "hpack-4.0.0-py3-none-any.whl", hash = "sha256:84a076fad3dc9a9f8063ccb8041ef100867b1878b25ef0ee63847a5d53818a6c"}, + {file = "hpack-4.0.0.tar.gz", hash = "sha256:fc41de0c63e687ebffde81187a948221294896f6bdc0ae2312708df339430095"}, +] [[package]] name = "httpcore" version = "0.17.3" description = "A minimal low-level HTTP client." -category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "httpcore-0.17.3-py3-none-any.whl", hash = "sha256:c2789b767ddddfa2a5782e3199b2b7f6894540b17b16ec26b2c4d8e103510b87"}, + {file = "httpcore-0.17.3.tar.gz", hash = "sha256:a6f30213335e34c1ade7be6ec7c47f19f50c56db36abef1a9dfa3815b1cb3888"}, +] [package.dependencies] anyio = ">=3.0,<5.0" certifi = "*" h11 = ">=0.13,<0.15" -sniffio = ">=1.0.0,<2.0.0" +sniffio = "==1.*" [package.extras] http2 = ["h2 (>=3,<5)"] -socks = ["socksio (>=1.0.0,<2.0.0)"] +socks = ["socksio (==1.*)"] [[package]] name = "httplib2" version = "0.22.0" description = "A comprehensive HTTP client library." 
-category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "httplib2-0.22.0-py3-none-any.whl", hash = "sha256:14ae0a53c1ba8f3d37e9e27cf37eabb0fb9980f435ba405d546948b009dd64dc"}, + {file = "httplib2-0.22.0.tar.gz", hash = "sha256:d7a10bc5ef5ab08322488bde8c726eeee5c8618723fdb399597ec58f3d82df81"}, +] [package.dependencies] pyparsing = {version = ">=2.4.2,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.0.2 || >3.0.2,<3.0.3 || >3.0.3,<4", markers = "python_version > \"3.0\""} @@ -2897,9 +3970,12 @@ pyparsing = {version = ">=2.4.2,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.0.2 || >3.0 name = "httpx" version = "0.24.1" description = "The next generation HTTP client." -category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "httpx-0.24.1-py3-none-any.whl", hash = "sha256:06781eb9ac53cde990577af654bd990a4949de37a28bdb4a230d434f3a30b9bd"}, + {file = "httpx-0.24.1.tar.gz", hash = "sha256:5853a43053df830c20f8110c5e69fe44d035d850b2dfe795e196f00fdb774bdd"}, +] [package.dependencies] certifi = "*" @@ -2910,17 +3986,20 @@ sniffio = "*" [package.extras] brotli = ["brotli", "brotlicffi"] -cli = ["click (>=8.0.0,<9.0.0)", "pygments (>=2.0.0,<3.0.0)", "rich (>=10,<14)"] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] -socks = ["socksio (>=1.0.0,<2.0.0)"] +socks = ["socksio (==1.*)"] [[package]] name = "humanfriendly" version = "10.0" description = "Human friendly output for text interfaces using Python" -category = "main" optional = true python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "humanfriendly-10.0-py2.py3-none-any.whl", hash = "sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477"}, + {file = "humanfriendly-10.0.tar.gz", hash = "sha256:6b0b831ce8f15f7300721aa49829fc4e83921a9a301cc7f606be6686a2288ddc"}, +] [package.dependencies] pyreadline3 = {version = "*", markers = "sys_platform == \"win32\" and python_version >= \"3.8\""} @@ -2929,9 +4008,12 @@ pyreadline3 = {version = "*", markers = "sys_platform == \"win32\" and python_ve name = "humanize" version = "4.8.0" description = "Python humanize utilities" -category = "main" optional = false python-versions = ">=3.8" +files = [ + {file = "humanize-4.8.0-py3-none-any.whl", hash = "sha256:8bc9e2bb9315e61ec06bf690151ae35aeb65651ab091266941edf97c90836404"}, + {file = "humanize-4.8.0.tar.gz", hash = "sha256:9783373bf1eec713a770ecaa7c2d7a7902c98398009dfa3d8a2df91eec9311e8"}, +] [package.extras] tests = ["freezegun", "pytest", "pytest-cov"] @@ -2940,25 +4022,34 @@ tests = ["freezegun", "pytest", "pytest-cov"] name = "hyperframe" version = "6.0.1" description = "HTTP/2 framing layer for Python" -category = "main" optional = true python-versions = ">=3.6.1" +files = [ + {file = "hyperframe-6.0.1-py3-none-any.whl", hash = "sha256:0ec6bafd80d8ad2195c4f03aacba3a8265e57bc4cff261e802bf39970ed02a15"}, + {file = "hyperframe-6.0.1.tar.gz", hash = "sha256:ae510046231dc8e9ecb1a6586f63d2347bf4c8905914aa84ba585ae85f28a914"}, +] [[package]] name = "idna" version = "3.4" description = "Internationalized Domain Names in Applications (IDNA)" -category = "main" optional = false python-versions = ">=3.5" +files = [ + {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, + {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, +] [[package]] name = "importlib-metadata" version = "6.11.0" 
description = "Read metadata from Python packages" -category = "main" optional = false python-versions = ">=3.8" +files = [ + {file = "importlib_metadata-6.11.0-py3-none-any.whl", hash = "sha256:f0afba6205ad8f8947c7d338b5342d5db2afbfd82f9cbef7879a9539cc12eb9b"}, + {file = "importlib_metadata-6.11.0.tar.gz", hash = "sha256:1231cf92d825c9e03cfc4da076a16de6422c863558229ea0b22b675657463443"}, +] [package.dependencies] zipp = ">=0.5" @@ -2972,9 +4063,12 @@ testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs name = "importlib-resources" version = "6.0.1" description = "Read resources from Python packages" -category = "main" optional = false python-versions = ">=3.8" +files = [ + {file = "importlib_resources-6.0.1-py3-none-any.whl", hash = "sha256:134832a506243891221b88b4ae1213327eea96ceb4e407a00d790bb0626f45cf"}, + {file = "importlib_resources-6.0.1.tar.gz", hash = "sha256:4359457e42708462b9626a04657c6208ad799ceb41e5c58c57ffa0e6a098a5d4"}, +] [package.dependencies] zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} @@ -2987,25 +4081,34 @@ testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", name = "inflection" version = "0.5.1" description = "A port of Ruby on Rails inflector to Python" -category = "dev" optional = false python-versions = ">=3.5" +files = [ + {file = "inflection-0.5.1-py2.py3-none-any.whl", hash = "sha256:f38b2b640938a4f35ade69ac3d053042959b62a0f1076a5bbaa1b9526605a8a2"}, + {file = "inflection-0.5.1.tar.gz", hash = "sha256:1a29730d366e996aaacffb2f1f1cb9593dc38e2ddd30c91250c6dde09ea9b417"}, +] [[package]] name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" -category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] [[package]] name = "isodate" version = "0.6.1" description = "An ISO 8601 date/time/duration parser and formatter" -category = "main" optional = false python-versions = "*" +files = [ + {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, + {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, +] [package.dependencies] six = "*" @@ -3014,9 +4117,12 @@ six = "*" name = "isort" version = "5.12.0" description = "A Python utility / library to sort Python imports." -category = "dev" optional = false python-versions = ">=3.8.0" +files = [ + {file = "isort-5.12.0-py3-none-any.whl", hash = "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6"}, + {file = "isort-5.12.0.tar.gz", hash = "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504"}, +] [package.extras] colors = ["colorama (>=0.4.3)"] @@ -3028,17 +4134,23 @@ requirements-deprecated-finder = ["pip-api", "pipreqs"] name = "itsdangerous" version = "2.1.2" description = "Safely pass data to untrusted environments and back." 
-category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "itsdangerous-2.1.2-py3-none-any.whl", hash = "sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44"}, + {file = "itsdangerous-2.1.2.tar.gz", hash = "sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a"}, +] [[package]] name = "jaraco-classes" version = "3.3.0" description = "Utility functions for Python class constructs" -category = "main" optional = true python-versions = ">=3.8" +files = [ + {file = "jaraco.classes-3.3.0-py3-none-any.whl", hash = "sha256:10afa92b6743f25c0cf5f37c6bb6e18e2c5bb84a16527ccfc0040ea377e7aaeb"}, + {file = "jaraco.classes-3.3.0.tar.gz", hash = "sha256:c063dd08e89217cee02c8d5e5ec560f2c8ce6cdc2fcdc2e68f7b2e5547ed3621"}, +] [package.dependencies] more-itertools = "*" @@ -3051,9 +4163,12 @@ testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", name = "jeepney" version = "0.8.0" description = "Low-level, pure Python DBus protocol wrapper." -category = "main" optional = true python-versions = ">=3.7" +files = [ + {file = "jeepney-0.8.0-py3-none-any.whl", hash = "sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755"}, + {file = "jeepney-0.8.0.tar.gz", hash = "sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806"}, +] [package.extras] test = ["async-timeout", "pytest", "pytest-asyncio (>=0.17)", "pytest-trio", "testpath", "trio"] @@ -3063,9 +4178,12 @@ trio = ["async_generator", "trio"] name = "jinja2" version = "3.1.2" description = "A very fast and expressive template engine." -category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, + {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, +] [package.dependencies] MarkupSafe = ">=2.0" @@ -3077,9 +4195,12 @@ i18n = ["Babel (>=2.7)"] name = "jinxed" version = "1.2.0" description = "Jinxed Terminal Library" -category = "dev" optional = false python-versions = "*" +files = [ + {file = "jinxed-1.2.0-py2.py3-none-any.whl", hash = "sha256:cfc2b2e4e3b4326954d546ba6d6b9a7a796ddcb0aef8d03161d005177eb0d48b"}, + {file = "jinxed-1.2.0.tar.gz", hash = "sha256:032acda92d5c57cd216033cbbd53de731e6ed50deb63eb4781336ca55f72cda5"}, +] [package.dependencies] ansicon = {version = "*", markers = "platform_system == \"Windows\""} @@ -3088,17 +4209,24 @@ ansicon = {version = "*", markers = "platform_system == \"Windows\""} name = "jmespath" version = "1.0.1" description = "JSON Matching Expressions" -category = "main" optional = true python-versions = ">=3.7" +files = [ + {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, + {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, +] [[package]] name = "jsonpath-ng" version = "1.5.3" description = "A final implementation of JSONPath for Python that aims to be standard compliant, including arithmetic and binary comparison operators and providing clear AST for metaprogramming." 
-category = "main" optional = false python-versions = "*" +files = [ + {file = "jsonpath-ng-1.5.3.tar.gz", hash = "sha256:a273b182a82c1256daab86a313b937059261b5c5f8c4fa3fc38b882b344dd567"}, + {file = "jsonpath_ng-1.5.3-py2-none-any.whl", hash = "sha256:f75b95dbecb8a0f3b86fd2ead21c2b022c3f5770957492b9b6196ecccfeb10aa"}, + {file = "jsonpath_ng-1.5.3-py3-none-any.whl", hash = "sha256:292a93569d74029ba75ac2dc3d3630fc0e17b2df26119a165fa1d498ca47bf65"}, +] [package.dependencies] decorator = "*" @@ -3109,9 +4237,12 @@ six = "*" name = "jsonschema" version = "4.19.0" description = "An implementation of JSON Schema validation for Python" -category = "main" optional = false python-versions = ">=3.8" +files = [ + {file = "jsonschema-4.19.0-py3-none-any.whl", hash = "sha256:043dc26a3845ff09d20e4420d6012a9c91c9aa8999fa184e7efcfeccb41e32cb"}, + {file = "jsonschema-4.19.0.tar.gz", hash = "sha256:6e1e7569ac13be8139b2dd2c21a55d350066ee3f80df06c608b398cdc6f30e8f"}, +] [package.dependencies] attrs = ">=22.2.0" @@ -3129,9 +4260,12 @@ format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339- name = "jsonschema-specifications" version = "2023.7.1" description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" -category = "main" optional = false python-versions = ">=3.8" +files = [ + {file = "jsonschema_specifications-2023.7.1-py3-none-any.whl", hash = "sha256:05adf340b659828a004220a9613be00fa3f223f2b82002e273dee62fd50524b1"}, + {file = "jsonschema_specifications-2023.7.1.tar.gz", hash = "sha256:c91a50404e88a1f6ba40636778e2ee08f6e24c5613fe4c53ac24578a5a7f72bb"}, +] [package.dependencies] importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""} @@ -3141,9 +4275,12 @@ referencing = ">=0.28.0" name = "keyring" version = "24.2.0" description = "Store and access your passwords safely." -category = "main" optional = true python-versions = ">=3.8" +files = [ + {file = "keyring-24.2.0-py3-none-any.whl", hash = "sha256:4901caaf597bfd3bbd78c9a0c7c4c29fcd8310dab2cffefe749e916b6527acd6"}, + {file = "keyring-24.2.0.tar.gz", hash = "sha256:ca0746a19ec421219f4d713f848fa297a661a8a8c1504867e55bfb5e09091509"}, +] [package.dependencies] importlib-metadata = {version = ">=4.11.4", markers = "python_version < \"3.12\""} @@ -3162,17 +4299,57 @@ testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", name = "lazy-object-proxy" version = "1.9.0" description = "A fast and thorough lazy object proxy." 
-category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "lazy-object-proxy-1.9.0.tar.gz", hash = "sha256:659fb5809fa4629b8a1ac5106f669cfc7bef26fbb389dda53b3e010d1ac4ebae"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b40387277b0ed2d0602b8293b94d7257e17d1479e257b4de114ea11a8cb7f2d7"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8c6cfb338b133fbdbc5cfaa10fe3c6aeea827db80c978dbd13bc9dd8526b7d4"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:721532711daa7db0d8b779b0bb0318fa87af1c10d7fe5e52ef30f8eff254d0cd"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:66a3de4a3ec06cd8af3f61b8e1ec67614fbb7c995d02fa224813cb7afefee701"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1aa3de4088c89a1b69f8ec0dcc169aa725b0ff017899ac568fe44ddc1396df46"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-win32.whl", hash = "sha256:f0705c376533ed2a9e5e97aacdbfe04cecd71e0aa84c7c0595d02ef93b6e4455"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:ea806fd4c37bf7e7ad82537b0757999264d5f70c45468447bb2b91afdbe73a6e"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:946d27deaff6cf8452ed0dba83ba38839a87f4f7a9732e8f9fd4107b21e6ff07"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79a31b086e7e68b24b99b23d57723ef7e2c6d81ed21007b6281ebcd1688acb0a"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f699ac1c768270c9e384e4cbd268d6e67aebcfae6cd623b4d7c3bfde5a35db59"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bfb38f9ffb53b942f2b5954e0f610f1e721ccebe9cce9025a38c8ccf4a5183a4"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:189bbd5d41ae7a498397287c408617fe5c48633e7755287b21d741f7db2706a9"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-win32.whl", hash = "sha256:81fc4d08b062b535d95c9ea70dbe8a335c45c04029878e62d744bdced5141586"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:f2457189d8257dd41ae9b434ba33298aec198e30adf2dcdaaa3a28b9994f6adb"}, + {file = "lazy_object_proxy-1.9.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d9e25ef10a39e8afe59a5c348a4dbf29b4868ab76269f81ce1674494e2565a6e"}, + {file = "lazy_object_proxy-1.9.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cbf9b082426036e19c6924a9ce90c740a9861e2bdc27a4834fd0a910742ac1e8"}, + {file = "lazy_object_proxy-1.9.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f5fa4a61ce2438267163891961cfd5e32ec97a2c444e5b842d574251ade27d2"}, + {file = "lazy_object_proxy-1.9.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:8fa02eaab317b1e9e03f69aab1f91e120e7899b392c4fc19807a8278a07a97e8"}, + {file = "lazy_object_proxy-1.9.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e7c21c95cae3c05c14aafffe2865bbd5e377cfc1348c4f7751d9dc9a48ca4bda"}, + {file = "lazy_object_proxy-1.9.0-cp37-cp37m-win32.whl", hash = "sha256:f12ad7126ae0c98d601a7ee504c1122bcef553d1d5e0c3bfa77b16b3968d2734"}, + {file = 
"lazy_object_proxy-1.9.0-cp37-cp37m-win_amd64.whl", hash = "sha256:edd20c5a55acb67c7ed471fa2b5fb66cb17f61430b7a6b9c3b4a1e40293b1671"}, + {file = "lazy_object_proxy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2d0daa332786cf3bb49e10dc6a17a52f6a8f9601b4cf5c295a4f85854d61de63"}, + {file = "lazy_object_proxy-1.9.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cd077f3d04a58e83d04b20e334f678c2b0ff9879b9375ed107d5d07ff160171"}, + {file = "lazy_object_proxy-1.9.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:660c94ea760b3ce47d1855a30984c78327500493d396eac4dfd8bd82041b22be"}, + {file = "lazy_object_proxy-1.9.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:212774e4dfa851e74d393a2370871e174d7ff0ebc980907723bb67d25c8a7c30"}, + {file = "lazy_object_proxy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f0117049dd1d5635bbff65444496c90e0baa48ea405125c088e93d9cf4525b11"}, + {file = "lazy_object_proxy-1.9.0-cp38-cp38-win32.whl", hash = "sha256:0a891e4e41b54fd5b8313b96399f8b0e173bbbfc03c7631f01efbe29bb0bcf82"}, + {file = "lazy_object_proxy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:9990d8e71b9f6488e91ad25f322898c136b008d87bf852ff65391b004da5e17b"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9e7551208b2aded9c1447453ee366f1c4070602b3d932ace044715d89666899b"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f83ac4d83ef0ab017683d715ed356e30dd48a93746309c8f3517e1287523ef4"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7322c3d6f1766d4ef1e51a465f47955f1e8123caee67dd641e67d539a534d006"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:18b78ec83edbbeb69efdc0e9c1cb41a3b1b1ed11ddd8ded602464c3fc6020494"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:09763491ce220c0299688940f8dc2c5d05fd1f45af1e42e636b2e8b2303e4382"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-win32.whl", hash = "sha256:9090d8e53235aa280fc9239a86ae3ea8ac58eff66a705fa6aa2ec4968b95c821"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:db1c1722726f47e10e0b5fdbf15ac3b8adb58c091d12b3ab713965795036985f"}, +] [[package]] name = "leather" version = "0.3.4" description = "Python charting for 80% of humans." -category = "main" optional = false python-versions = "*" +files = [ + {file = "leather-0.3.4-py2.py3-none-any.whl", hash = "sha256:5e741daee96e9f1e9e06081b8c8a10c4ac199301a0564cdd99b09df15b4603d2"}, + {file = "leather-0.3.4.tar.gz", hash = "sha256:b43e21c8fa46b2679de8449f4d953c06418666dc058ce41055ee8a8d3bb40918"}, +] [package.dependencies] six = ">=1.6.1" @@ -3181,9 +4358,12 @@ six = ">=1.6.1" name = "limits" version = "3.6.0" description = "Rate limiting utilities" -category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "limits-3.6.0-py3-none-any.whl", hash = "sha256:32fe29a398352c71bc43d53773117d47e22c5ea4200aef28d3f5fdee10334cd7"}, + {file = "limits-3.6.0.tar.gz", hash = "sha256:57a9c69fd37ad1e4fa3886dff8d035227e1f6af87f47e9118627e72cf1ced3bf"}, +] [package.dependencies] deprecated = ">=1.2" @@ -3207,9 +4387,12 @@ rediscluster = ["redis (>=4.2.0,!=4.5.2,!=4.5.3)"] name = "linkify-it-py" version = "2.0.2" description = "Links recognition library with FULL unicode support." 
-category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "linkify-it-py-2.0.2.tar.gz", hash = "sha256:19f3060727842c254c808e99d465c80c49d2c7306788140987a1a7a29b0d6ad2"}, + {file = "linkify_it_py-2.0.2-py3-none-any.whl", hash = "sha256:a3a24428f6c96f27370d7fe61d2ac0be09017be5190d68d8658233171f1b6541"}, +] [package.dependencies] uc-micro-py = "*" @@ -3224,71 +4407,220 @@ test = ["coverage", "pytest", "pytest-cov"] name = "lockfile" version = "0.12.2" description = "Platform-independent file locking module" -category = "dev" optional = false python-versions = "*" +files = [ + {file = "lockfile-0.12.2-py2.py3-none-any.whl", hash = "sha256:6c3cb24f344923d30b2785d5ad75182c8ea7ac1b6171b08657258ec7429d50fa"}, + {file = "lockfile-0.12.2.tar.gz", hash = "sha256:6aed02de03cba24efabcd600b30540140634fc06cfa603822d508d5361e9f799"}, +] [[package]] name = "logbook" version = "1.5.3" description = "A logging replacement for Python" -category = "main" optional = false python-versions = "*" +files = [ + {file = "Logbook-1.5.3-cp27-cp27m-win32.whl", hash = "sha256:56ee54c11df3377314cedcd6507638f015b4b88c0238c2e01b5eb44fd3a6ad1b"}, + {file = "Logbook-1.5.3-cp27-cp27m-win_amd64.whl", hash = "sha256:2dc85f1510533fddb481e97677bb7bca913560862734c0b3b289bfed04f78c92"}, + {file = "Logbook-1.5.3-cp35-cp35m-win32.whl", hash = "sha256:94e2e11ff3c2304b0d09a36c6208e5ae756eb948b210e5cbd63cd8d27f911542"}, + {file = "Logbook-1.5.3-cp35-cp35m-win_amd64.whl", hash = "sha256:97fee1bd9605f76335b169430ed65e15e457a844b2121bd1d90a08cf7e30aba0"}, + {file = "Logbook-1.5.3-cp36-cp36m-win32.whl", hash = "sha256:7c533eb728b3d220b1b5414ba4635292d149d79f74f6973b4aa744c850ca944a"}, + {file = "Logbook-1.5.3-cp36-cp36m-win_amd64.whl", hash = "sha256:e18f7422214b1cf0240c56f884fd9c9b4ff9d0da2eabca9abccba56df7222f66"}, + {file = "Logbook-1.5.3-cp37-cp37m-win32.whl", hash = "sha256:8f76a2e7b1f72595f753228732f81ce342caf03babc3fed6bbdcf366f2f20f18"}, + {file = "Logbook-1.5.3-cp37-cp37m-win_amd64.whl", hash = "sha256:0cf2cdbfb65a03b5987d19109dacad13417809dcf697f66e1a7084fb21744ea9"}, + {file = "Logbook-1.5.3.tar.gz", hash = "sha256:66f454ada0f56eae43066f604a222b09893f98c1adc18df169710761b8f32fe8"}, +] [package.extras] -all = ["Jinja2", "brotli", "cython", "execnet (>=1.0.9)", "pytest (>4.0)", "pytest-cov (>=2.6)", "pyzmq", "redis", "sqlalchemy"] +all = ["Jinja2", "brotli", "cython", "execnet (>=1.0.9)", "mock", "pytest", "pytest-cov (<2.6)", "pyzmq", "redis", "sqlalchemy"] compression = ["brotli"] -dev = ["cython", "pytest (>4.0)", "pytest-cov (>=2.6)"] +dev = ["cython", "mock", "pytest", "pytest-cov (<2.6)"] execnet = ["execnet (>=1.0.9)"] jinja = ["Jinja2"] redis = ["redis"] sqlalchemy = ["sqlalchemy"] -test = ["pytest (>4.0)", "pytest-cov (>=2.6)"] +test = ["mock", "pytest", "pytest-cov (<2.6)"] zmq = ["pyzmq"] [[package]] name = "lxml" version = "4.9.3" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." 
-category = "main" optional = true python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*" - -[package.extras] -cssselect = ["cssselect (>=0.7)"] -html5 = ["html5lib"] -htmlsoup = ["BeautifulSoup4"] -source = ["Cython (>=0.29.35)"] - -[[package]] -name = "lz4" -version = "4.3.3" -description = "LZ4 Bindings for Python" -category = "main" -optional = true -python-versions = ">=3.8" - -[package.extras] -docs = ["sphinx (>=1.6.0)", "sphinx-bootstrap-theme"] -flake8 = ["flake8"] -tests = ["psutil", "pytest (!=3.3.0)", "pytest-cov"] - -[[package]] -name = "makefun" -version = "1.15.1" -description = "Small library to dynamically create python functions." -category = "main" -optional = false -python-versions = "*" - -[[package]] -name = "mako" +files = [ + {file = "lxml-4.9.3-cp27-cp27m-macosx_11_0_x86_64.whl", hash = "sha256:b0a545b46b526d418eb91754565ba5b63b1c0b12f9bd2f808c852d9b4b2f9b5c"}, + {file = "lxml-4.9.3-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:075b731ddd9e7f68ad24c635374211376aa05a281673ede86cbe1d1b3455279d"}, + {file = "lxml-4.9.3-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1e224d5755dba2f4a9498e150c43792392ac9b5380aa1b845f98a1618c94eeef"}, + {file = "lxml-4.9.3-cp27-cp27m-win32.whl", hash = "sha256:2c74524e179f2ad6d2a4f7caf70e2d96639c0954c943ad601a9e146c76408ed7"}, + {file = "lxml-4.9.3-cp27-cp27m-win_amd64.whl", hash = "sha256:4f1026bc732b6a7f96369f7bfe1a4f2290fb34dce00d8644bc3036fb351a4ca1"}, + {file = "lxml-4.9.3-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c0781a98ff5e6586926293e59480b64ddd46282953203c76ae15dbbbf302e8bb"}, + {file = "lxml-4.9.3-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:cef2502e7e8a96fe5ad686d60b49e1ab03e438bd9123987994528febd569868e"}, + {file = "lxml-4.9.3-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:b86164d2cff4d3aaa1f04a14685cbc072efd0b4f99ca5708b2ad1b9b5988a991"}, + {file = "lxml-4.9.3-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:42871176e7896d5d45138f6d28751053c711ed4d48d8e30b498da155af39aebd"}, + {file = "lxml-4.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:ae8b9c6deb1e634ba4f1930eb67ef6e6bf6a44b6eb5ad605642b2d6d5ed9ce3c"}, + {file = "lxml-4.9.3-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:411007c0d88188d9f621b11d252cce90c4a2d1a49db6c068e3c16422f306eab8"}, + {file = "lxml-4.9.3-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:cd47b4a0d41d2afa3e58e5bf1f62069255aa2fd6ff5ee41604418ca925911d76"}, + {file = "lxml-4.9.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0e2cb47860da1f7e9a5256254b74ae331687b9672dfa780eed355c4c9c3dbd23"}, + {file = "lxml-4.9.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1247694b26342a7bf47c02e513d32225ededd18045264d40758abeb3c838a51f"}, + {file = "lxml-4.9.3-cp310-cp310-win32.whl", hash = "sha256:cdb650fc86227eba20de1a29d4b2c1bfe139dc75a0669270033cb2ea3d391b85"}, + {file = "lxml-4.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:97047f0d25cd4bcae81f9ec9dc290ca3e15927c192df17331b53bebe0e3ff96d"}, + {file = "lxml-4.9.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:1f447ea5429b54f9582d4b955f5f1985f278ce5cf169f72eea8afd9502973dd5"}, + {file = "lxml-4.9.3-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:57d6ba0ca2b0c462f339640d22882acc711de224d769edf29962b09f77129cbf"}, + {file = 
"lxml-4.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:9767e79108424fb6c3edf8f81e6730666a50feb01a328f4a016464a5893f835a"}, + {file = "lxml-4.9.3-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:71c52db65e4b56b8ddc5bb89fb2e66c558ed9d1a74a45ceb7dcb20c191c3df2f"}, + {file = "lxml-4.9.3-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d73d8ecf8ecf10a3bd007f2192725a34bd62898e8da27eb9d32a58084f93962b"}, + {file = "lxml-4.9.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0a3d3487f07c1d7f150894c238299934a2a074ef590b583103a45002035be120"}, + {file = "lxml-4.9.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e28c51fa0ce5674be9f560c6761c1b441631901993f76700b1b30ca6c8378d6"}, + {file = "lxml-4.9.3-cp311-cp311-win32.whl", hash = "sha256:0bfd0767c5c1de2551a120673b72e5d4b628737cb05414f03c3277bf9bed3305"}, + {file = "lxml-4.9.3-cp311-cp311-win_amd64.whl", hash = "sha256:25f32acefac14ef7bd53e4218fe93b804ef6f6b92ffdb4322bb6d49d94cad2bc"}, + {file = "lxml-4.9.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:d3ff32724f98fbbbfa9f49d82852b159e9784d6094983d9a8b7f2ddaebb063d4"}, + {file = "lxml-4.9.3-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:48d6ed886b343d11493129e019da91d4039826794a3e3027321c56d9e71505be"}, + {file = "lxml-4.9.3-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:9a92d3faef50658dd2c5470af249985782bf754c4e18e15afb67d3ab06233f13"}, + {file = "lxml-4.9.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b4e4bc18382088514ebde9328da057775055940a1f2e18f6ad2d78aa0f3ec5b9"}, + {file = "lxml-4.9.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fc9b106a1bf918db68619fdcd6d5ad4f972fdd19c01d19bdb6bf63f3589a9ec5"}, + {file = "lxml-4.9.3-cp312-cp312-win_amd64.whl", hash = "sha256:d37017287a7adb6ab77e1c5bee9bcf9660f90ff445042b790402a654d2ad81d8"}, + {file = "lxml-4.9.3-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:56dc1f1ebccc656d1b3ed288f11e27172a01503fc016bcabdcbc0978b19352b7"}, + {file = "lxml-4.9.3-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:578695735c5a3f51569810dfebd05dd6f888147a34f0f98d4bb27e92b76e05c2"}, + {file = "lxml-4.9.3-cp35-cp35m-win32.whl", hash = "sha256:704f61ba8c1283c71b16135caf697557f5ecf3e74d9e453233e4771d68a1f42d"}, + {file = "lxml-4.9.3-cp35-cp35m-win_amd64.whl", hash = "sha256:c41bfca0bd3532d53d16fd34d20806d5c2b1ace22a2f2e4c0008570bf2c58833"}, + {file = "lxml-4.9.3-cp36-cp36m-macosx_11_0_x86_64.whl", hash = "sha256:64f479d719dc9f4c813ad9bb6b28f8390360660b73b2e4beb4cb0ae7104f1c12"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:dd708cf4ee4408cf46a48b108fb9427bfa00b9b85812a9262b5c668af2533ea5"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c31c7462abdf8f2ac0577d9f05279727e698f97ecbb02f17939ea99ae8daa98"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e3cd95e10c2610c360154afdc2f1480aea394f4a4f1ea0a5eacce49640c9b190"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:4930be26af26ac545c3dffb662521d4e6268352866956672231887d18f0eaab2"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4aec80cde9197340bc353d2768e2a75f5f60bacda2bab72ab1dc499589b3878c"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = 
"sha256:14e019fd83b831b2e61baed40cab76222139926b1fb5ed0e79225bc0cae14584"}, + {file = "lxml-4.9.3-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0c0850c8b02c298d3c7006b23e98249515ac57430e16a166873fc47a5d549287"}, + {file = "lxml-4.9.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:aca086dc5f9ef98c512bac8efea4483eb84abbf926eaeedf7b91479feb092458"}, + {file = "lxml-4.9.3-cp36-cp36m-win32.whl", hash = "sha256:50baa9c1c47efcaef189f31e3d00d697c6d4afda5c3cde0302d063492ff9b477"}, + {file = "lxml-4.9.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bef4e656f7d98aaa3486d2627e7d2df1157d7e88e7efd43a65aa5dd4714916cf"}, + {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:46f409a2d60f634fe550f7133ed30ad5321ae2e6630f13657fb9479506b00601"}, + {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:4c28a9144688aef80d6ea666c809b4b0e50010a2aca784c97f5e6bf143d9f129"}, + {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:141f1d1a9b663c679dc524af3ea1773e618907e96075262726c7612c02b149a4"}, + {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:53ace1c1fd5a74ef662f844a0413446c0629d151055340e9893da958a374f70d"}, + {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:17a753023436a18e27dd7769e798ce302963c236bc4114ceee5b25c18c52c693"}, + {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7d298a1bd60c067ea75d9f684f5f3992c9d6766fadbc0bcedd39750bf344c2f4"}, + {file = "lxml-4.9.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:081d32421db5df44c41b7f08a334a090a545c54ba977e47fd7cc2deece78809a"}, + {file = "lxml-4.9.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:23eed6d7b1a3336ad92d8e39d4bfe09073c31bfe502f20ca5116b2a334f8ec02"}, + {file = "lxml-4.9.3-cp37-cp37m-win32.whl", hash = "sha256:1509dd12b773c02acd154582088820893109f6ca27ef7291b003d0e81666109f"}, + {file = "lxml-4.9.3-cp37-cp37m-win_amd64.whl", hash = "sha256:120fa9349a24c7043854c53cae8cec227e1f79195a7493e09e0c12e29f918e52"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4d2d1edbca80b510443f51afd8496be95529db04a509bc8faee49c7b0fb6d2cc"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8d7e43bd40f65f7d97ad8ef5c9b1778943d02f04febef12def25f7583d19baac"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:71d66ee82e7417828af6ecd7db817913cb0cf9d4e61aa0ac1fde0583d84358db"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:6fc3c450eaa0b56f815c7b62f2b7fba7266c4779adcf1cece9e6deb1de7305ce"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:65299ea57d82fb91c7f019300d24050c4ddeb7c5a190e076b5f48a2b43d19c42"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:eadfbbbfb41b44034a4c757fd5d70baccd43296fb894dba0295606a7cf3124aa"}, + {file = "lxml-4.9.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3e9bdd30efde2b9ccfa9cb5768ba04fe71b018a25ea093379c857c9dad262c40"}, + {file = "lxml-4.9.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fcdd00edfd0a3001e0181eab3e63bd5c74ad3e67152c84f93f13769a40e073a7"}, + {file = "lxml-4.9.3-cp38-cp38-win32.whl", hash = 
"sha256:57aba1bbdf450b726d58b2aea5fe47c7875f5afb2c4a23784ed78f19a0462574"}, + {file = "lxml-4.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:92af161ecbdb2883c4593d5ed4815ea71b31fafd7fd05789b23100d081ecac96"}, + {file = "lxml-4.9.3-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:9bb6ad405121241e99a86efff22d3ef469024ce22875a7ae045896ad23ba2340"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8ed74706b26ad100433da4b9d807eae371efaa266ffc3e9191ea436087a9d6a7"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fbf521479bcac1e25a663df882c46a641a9bff6b56dc8b0fafaebd2f66fb231b"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:303bf1edce6ced16bf67a18a1cf8339d0db79577eec5d9a6d4a80f0fb10aa2da"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:5515edd2a6d1a5a70bfcdee23b42ec33425e405c5b351478ab7dc9347228f96e"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:690dafd0b187ed38583a648076865d8c229661ed20e48f2335d68e2cf7dc829d"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:b6420a005548ad52154c8ceab4a1290ff78d757f9e5cbc68f8c77089acd3c432"}, + {file = "lxml-4.9.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bb3bb49c7a6ad9d981d734ef7c7193bc349ac338776a0360cc671eaee89bcf69"}, + {file = "lxml-4.9.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d27be7405547d1f958b60837dc4c1007da90b8b23f54ba1f8b728c78fdb19d50"}, + {file = "lxml-4.9.3-cp39-cp39-win32.whl", hash = "sha256:8df133a2ea5e74eef5e8fc6f19b9e085f758768a16e9877a60aec455ed2609b2"}, + {file = "lxml-4.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:4dd9a263e845a72eacb60d12401e37c616438ea2e5442885f65082c276dfb2b2"}, + {file = "lxml-4.9.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6689a3d7fd13dc687e9102a27e98ef33730ac4fe37795d5036d18b4d527abd35"}, + {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:f6bdac493b949141b733c5345b6ba8f87a226029cbabc7e9e121a413e49441e0"}, + {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:05186a0f1346ae12553d66df1cfce6f251589fea3ad3da4f3ef4e34b2d58c6a3"}, + {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c2006f5c8d28dee289f7020f721354362fa304acbaaf9745751ac4006650254b"}, + {file = "lxml-4.9.3-pp38-pypy38_pp73-macosx_11_0_x86_64.whl", hash = "sha256:5c245b783db29c4e4fbbbfc9c5a78be496c9fea25517f90606aa1f6b2b3d5f7b"}, + {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4fb960a632a49f2f089d522f70496640fdf1218f1243889da3822e0a9f5f3ba7"}, + {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:50670615eaf97227d5dc60de2dc99fb134a7130d310d783314e7724bf163f75d"}, + {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9719fe17307a9e814580af1f5c6e05ca593b12fb7e44fe62450a5384dbf61b4b"}, + {file = "lxml-4.9.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:3331bece23c9ee066e0fb3f96c61322b9e0f54d775fccefff4c38ca488de283a"}, + {file = "lxml-4.9.3-pp39-pypy39_pp73-macosx_11_0_x86_64.whl", hash = "sha256:ed667f49b11360951e201453fc3967344d0d0263aa415e1619e85ae7fd17b4e0"}, + {file = 
"lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8b77946fd508cbf0fccd8e400a7f71d4ac0e1595812e66025bac475a8e811694"}, + {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e4da8ca0c0c0aea88fd46be8e44bd49716772358d648cce45fe387f7b92374a7"}, + {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fe4bda6bd4340caa6e5cf95e73f8fea5c4bfc55763dd42f1b50a94c1b4a2fbd4"}, + {file = "lxml-4.9.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:f3df3db1d336b9356dd3112eae5f5c2b8b377f3bc826848567f10bfddfee77e9"}, + {file = "lxml-4.9.3.tar.gz", hash = "sha256:48628bd53a426c9eb9bc066a923acaa0878d1e86129fd5359aee99285f4eed9c"}, +] + +[package.extras] +cssselect = ["cssselect (>=0.7)"] +html5 = ["html5lib"] +htmlsoup = ["BeautifulSoup4"] +source = ["Cython (>=0.29.35)"] + +[[package]] +name = "lz4" +version = "4.3.3" +description = "LZ4 Bindings for Python" +optional = true +python-versions = ">=3.8" +files = [ + {file = "lz4-4.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b891880c187e96339474af2a3b2bfb11a8e4732ff5034be919aa9029484cd201"}, + {file = "lz4-4.3.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:222a7e35137d7539c9c33bb53fcbb26510c5748779364014235afc62b0ec797f"}, + {file = "lz4-4.3.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f76176492ff082657ada0d0f10c794b6da5800249ef1692b35cf49b1e93e8ef7"}, + {file = "lz4-4.3.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1d18718f9d78182c6b60f568c9a9cec8a7204d7cb6fad4e511a2ef279e4cb05"}, + {file = "lz4-4.3.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6cdc60e21ec70266947a48839b437d46025076eb4b12c76bd47f8e5eb8a75dcc"}, + {file = "lz4-4.3.3-cp310-cp310-win32.whl", hash = "sha256:c81703b12475da73a5d66618856d04b1307e43428a7e59d98cfe5a5d608a74c6"}, + {file = "lz4-4.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:43cf03059c0f941b772c8aeb42a0813d68d7081c009542301637e5782f8a33e2"}, + {file = "lz4-4.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:30e8c20b8857adef7be045c65f47ab1e2c4fabba86a9fa9a997d7674a31ea6b6"}, + {file = "lz4-4.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2f7b1839f795315e480fb87d9bc60b186a98e3e5d17203c6e757611ef7dcef61"}, + {file = "lz4-4.3.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edfd858985c23523f4e5a7526ca6ee65ff930207a7ec8a8f57a01eae506aaee7"}, + {file = "lz4-4.3.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e9c410b11a31dbdc94c05ac3c480cb4b222460faf9231f12538d0074e56c563"}, + {file = "lz4-4.3.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d2507ee9c99dbddd191c86f0e0c8b724c76d26b0602db9ea23232304382e1f21"}, + {file = "lz4-4.3.3-cp311-cp311-win32.whl", hash = "sha256:f180904f33bdd1e92967923a43c22899e303906d19b2cf8bb547db6653ea6e7d"}, + {file = "lz4-4.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:b14d948e6dce389f9a7afc666d60dd1e35fa2138a8ec5306d30cd2e30d36b40c"}, + {file = "lz4-4.3.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e36cd7b9d4d920d3bfc2369840da506fa68258f7bb176b8743189793c055e43d"}, + {file = "lz4-4.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:31ea4be9d0059c00b2572d700bf2c1bc82f241f2c3282034a759c9a4d6ca4dc2"}, + {file = 
"lz4-4.3.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33c9a6fd20767ccaf70649982f8f3eeb0884035c150c0b818ea660152cf3c809"}, + {file = "lz4-4.3.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bca8fccc15e3add173da91be8f34121578dc777711ffd98d399be35487c934bf"}, + {file = "lz4-4.3.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7d84b479ddf39fe3ea05387f10b779155fc0990125f4fb35d636114e1c63a2e"}, + {file = "lz4-4.3.3-cp312-cp312-win32.whl", hash = "sha256:337cb94488a1b060ef1685187d6ad4ba8bc61d26d631d7ba909ee984ea736be1"}, + {file = "lz4-4.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:5d35533bf2cee56f38ced91f766cd0038b6abf46f438a80d50c52750088be93f"}, + {file = "lz4-4.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:363ab65bf31338eb364062a15f302fc0fab0a49426051429866d71c793c23394"}, + {file = "lz4-4.3.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0a136e44a16fc98b1abc404fbabf7f1fada2bdab6a7e970974fb81cf55b636d0"}, + {file = "lz4-4.3.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abc197e4aca8b63f5ae200af03eb95fb4b5055a8f990079b5bdf042f568469dd"}, + {file = "lz4-4.3.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56f4fe9c6327adb97406f27a66420b22ce02d71a5c365c48d6b656b4aaeb7775"}, + {file = "lz4-4.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f0e822cd7644995d9ba248cb4b67859701748a93e2ab7fc9bc18c599a52e4604"}, + {file = "lz4-4.3.3-cp38-cp38-win32.whl", hash = "sha256:24b3206de56b7a537eda3a8123c644a2b7bf111f0af53bc14bed90ce5562d1aa"}, + {file = "lz4-4.3.3-cp38-cp38-win_amd64.whl", hash = "sha256:b47839b53956e2737229d70714f1d75f33e8ac26e52c267f0197b3189ca6de24"}, + {file = "lz4-4.3.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6756212507405f270b66b3ff7f564618de0606395c0fe10a7ae2ffcbbe0b1fba"}, + {file = "lz4-4.3.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ee9ff50557a942d187ec85462bb0960207e7ec5b19b3b48949263993771c6205"}, + {file = "lz4-4.3.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b901c7784caac9a1ded4555258207d9e9697e746cc8532129f150ffe1f6ba0d"}, + {file = "lz4-4.3.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6d9ec061b9eca86e4dcc003d93334b95d53909afd5a32c6e4f222157b50c071"}, + {file = "lz4-4.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4c7bf687303ca47d69f9f0133274958fd672efaa33fb5bcde467862d6c621f0"}, + {file = "lz4-4.3.3-cp39-cp39-win32.whl", hash = "sha256:054b4631a355606e99a42396f5db4d22046a3397ffc3269a348ec41eaebd69d2"}, + {file = "lz4-4.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:eac9af361e0d98335a02ff12fb56caeb7ea1196cf1a49dbf6f17828a131da807"}, + {file = "lz4-4.3.3.tar.gz", hash = "sha256:01fe674ef2889dbb9899d8a67361e0c4a2c833af5aeb37dd505727cf5d2a131e"}, +] + +[package.extras] +docs = ["sphinx (>=1.6.0)", "sphinx-bootstrap-theme"] +flake8 = ["flake8"] +tests = ["psutil", "pytest (!=3.3.0)", "pytest-cov"] + +[[package]] +name = "makefun" +version = "1.15.1" +description = "Small library to dynamically create python functions." 
+optional = false +python-versions = "*" +files = [ + {file = "makefun-1.15.1-py2.py3-none-any.whl", hash = "sha256:a63cfc7b47a539c76d97bd4fdb833c7d0461e759fd1225f580cb4be6200294d4"}, + {file = "makefun-1.15.1.tar.gz", hash = "sha256:40b0f118b6ded0d8d78c78f1eb679b8b6b2462e3c1b3e05fb1b2da8cd46b48a5"}, +] + +[[package]] +name = "mako" version = "1.2.4" description = "A super-fast templating language that borrows the best ideas from the existing templating languages." -category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "Mako-1.2.4-py3-none-any.whl", hash = "sha256:c97c79c018b9165ac9922ae4f32da095ffd3c4e6872b45eded42926deea46818"}, + {file = "Mako-1.2.4.tar.gz", hash = "sha256:d60a3903dc3bb01a18ad6a89cdbe2e4eadc69c0bc8ef1e3773ba53d44c3f7a34"}, +] [package.dependencies] MarkupSafe = ">=0.9.2" @@ -3302,9 +4634,12 @@ testing = ["pytest"] name = "markdown" version = "3.4.4" description = "Python implementation of John Gruber's Markdown." -category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "Markdown-3.4.4-py3-none-any.whl", hash = "sha256:a4c1b65c0957b4bd9e7d86ddc7b3c9868fb9670660f6f99f6d1bca8954d5a941"}, + {file = "Markdown-3.4.4.tar.gz", hash = "sha256:225c6123522495d4119a90b3a3ba31a1e87a70369e03f14799ea9c0d7183a3d6"}, +] [package.dependencies] importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} @@ -3317,9 +4652,12 @@ testing = ["coverage", "pyyaml"] name = "markdown-it-py" version = "3.0.0" description = "Python port of markdown-it. Markdown parsing, done right!" -category = "dev" optional = false python-versions = ">=3.8" +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] [package.dependencies] mdurl = ">=0.1,<1.0" @@ -3338,17 +4676,81 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] name = "markupsafe" version = "2.1.3" description = "Safely add untrusted strings to HTML/XML markup." 
-category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-win32.whl", hash = "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, + {file = 
"MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-win32.whl", hash = "sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-win_amd64.whl", hash = "sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e"}, + {file = 
"MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-win32.whl", hash = "sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-win32.whl", hash = "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba"}, + {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, +] [[package]] name = "marshmallow" version = "3.20.1" description = "A lightweight library for converting complex datatypes to and from native Python datatypes." 
-category = "dev" optional = false python-versions = ">=3.8" +files = [ + {file = "marshmallow-3.20.1-py3-none-any.whl", hash = "sha256:684939db93e80ad3561392f47be0230743131560a41c5110684c16e21ade0a5c"}, + {file = "marshmallow-3.20.1.tar.gz", hash = "sha256:5d2371bbe42000f2b3fb5eaa065224df7d8f8597bc19a1bbfa5bfe7fba8da889"}, +] [package.dependencies] packaging = ">=17.0" @@ -3363,9 +4765,12 @@ tests = ["pytest", "pytz", "simplejson"] name = "marshmallow-oneofschema" version = "3.0.1" description = "marshmallow multiplexing schema" -category = "dev" optional = false python-versions = ">=3.6" +files = [ + {file = "marshmallow-oneofschema-3.0.1.tar.gz", hash = "sha256:62cd2099b29188c92493c2940ee79d1bf2f2619a71721664e5a98ec2faa58237"}, + {file = "marshmallow_oneofschema-3.0.1-py2.py3-none-any.whl", hash = "sha256:bd29410a9f2f7457a2b428286e2a80ef76b8ddc3701527dc1f935a88914b02f2"}, +] [package.dependencies] marshmallow = ">=3.0.0,<4.0.0" @@ -3379,9 +4784,12 @@ tests = ["mock", "pytest"] name = "marshmallow-sqlalchemy" version = "0.26.1" description = "SQLAlchemy integration with the marshmallow (de)serialization library" -category = "dev" optional = false python-versions = ">=3.6" +files = [ + {file = "marshmallow-sqlalchemy-0.26.1.tar.gz", hash = "sha256:d8525f74de51554b5c8491effe036f60629a426229befa33ff614c8569a16a73"}, + {file = "marshmallow_sqlalchemy-0.26.1-py2.py3-none-any.whl", hash = "sha256:ba7493eeb8669a3bf00d8f906b657feaa87a740ae9e4ecf829cfd6ddf763d276"}, +] [package.dependencies] marshmallow = ">=3.0.0" @@ -3397,9 +4805,12 @@ tests = ["pytest", "pytest-lazy-fixture"] name = "mashumaro" version = "3.11" description = "Fast and well tested serialization library" -category = "main" optional = false python-versions = ">=3.8" +files = [ + {file = "mashumaro-3.11-py3-none-any.whl", hash = "sha256:8f858bdb33790db6d9f3087dce793a26d109aeae38bed3ca9c2d7f16f19db412"}, + {file = "mashumaro-3.11.tar.gz", hash = "sha256:b0b2443be4bdad29bb209d91fe4a2a918fbd7b63cccfeb457c7eeb567db02f5e"}, +] [package.dependencies] msgpack = {version = ">=0.5.6", optional = true, markers = "extra == \"msgpack\""} @@ -3415,17 +4826,23 @@ yaml = ["pyyaml (>=3.13)"] name = "mccabe" version = "0.7.0" description = "McCabe checker, plugin for flake8" -category = "dev" optional = false python-versions = ">=3.6" +files = [ + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, +] [[package]] name = "mdit-py-plugins" version = "0.4.0" description = "Collection of plugins for markdown-it-py" -category = "dev" optional = false python-versions = ">=3.8" +files = [ + {file = "mdit_py_plugins-0.4.0-py3-none-any.whl", hash = "sha256:b51b3bb70691f57f974e257e367107857a93b36f322a9e6d44ca5bf28ec2def9"}, + {file = "mdit_py_plugins-0.4.0.tar.gz", hash = "sha256:d8ab27e9aed6c38aa716819fedfde15ca275715955f8a185a8e1cf90fb1d2c1b"}, +] [package.dependencies] markdown-it-py = ">=1.0.0,<4.0.0" @@ -3439,3827 +4856,34 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] name = "mdurl" version = "0.1.2" description = "Markdown URL utilities" -category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = 
"sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] [[package]] name = "minimal-snowplow-tracker" version = "0.0.2" description = "A minimal snowplow event tracker for Python. Add analytics to your Python and Django apps, webapps and games" -category = "main" optional = false python-versions = "*" - -[package.dependencies] -requests = ">=2.2.1,<3.0" -six = ">=1.9.0,<2.0" - -[[package]] -name = "mmh3" -version = "4.0.1" -description = "Python extension for MurmurHash (MurmurHash3), a set of fast and robust hash functions." -category = "main" -optional = true -python-versions = "*" - -[package.extras] -test = ["mypy (>=1.0)", "pytest (>=7.0.0)"] - -[[package]] -name = "more-itertools" -version = "10.1.0" -description = "More routines for operating on iterables, beyond itertools" -category = "main" -optional = false -python-versions = ">=3.8" - -[[package]] -name = "mpmath" -version = "1.3.0" -description = "Python library for arbitrary-precision floating-point arithmetic" -category = "main" -optional = true -python-versions = "*" - -[package.extras] -develop = ["codecov", "pycodestyle", "pytest (>=4.6)", "pytest-cov", "wheel"] -docs = ["sphinx"] -gmpy = ["gmpy2 (>=2.1.0a4)"] -tests = ["pytest (>=4.6)"] - -[[package]] -name = "msal" -version = "1.23.0" -description = "The Microsoft Authentication Library (MSAL) for Python library enables your app to access the Microsoft Cloud by supporting authentication of users with Microsoft Azure Active Directory accounts (AAD) and Microsoft Accounts (MSA) using industry standard OAuth2 and OpenID Connect." -category = "main" -optional = true -python-versions = "*" - -[package.dependencies] -cryptography = ">=0.6,<44" -PyJWT = {version = ">=1.0.0,<3", extras = ["crypto"]} -requests = ">=2.0.0,<3" - -[package.extras] -broker = ["pymsalruntime (>=0.13.2,<0.14)"] - -[[package]] -name = "msal-extensions" -version = "1.0.0" -description = "Microsoft Authentication Library extensions (MSAL EX) provides a persistence API that can save your data on disk, encrypted on Windows, macOS and Linux. Concurrent data access will be coordinated by a file lock mechanism." 
-category = "main" -optional = true -python-versions = "*" - -[package.dependencies] -msal = ">=0.4.1,<2.0.0" -portalocker = [ - {version = ">=1.0,<3", markers = "python_version >= \"3.5\" and platform_system != \"Windows\""}, - {version = ">=1.6,<3", markers = "python_version >= \"3.5\" and platform_system == \"Windows\""}, -] - -[[package]] -name = "msgpack" -version = "1.0.5" -description = "MessagePack serializer" -category = "main" -optional = false -python-versions = "*" - -[[package]] -name = "multidict" -version = "6.0.4" -description = "multidict implementation" -category = "main" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "mypy" -version = "1.6.1" -description = "Optional static typing for Python" -category = "dev" -optional = false -python-versions = ">=3.8" - -[package.dependencies] -mypy-extensions = ">=1.0.0" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = ">=4.1.0" - -[package.extras] -dmypy = ["psutil (>=4.0)"] -install-types = ["pip"] -reports = ["lxml"] - -[[package]] -name = "mypy-boto3-athena" -version = "1.28.36" -description = "Type annotations for boto3.Athena 1.28.36 service generated with mypy-boto3-builder 7.18.0" -category = "main" -optional = true -python-versions = ">=3.7" - -[package.dependencies] -typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} - -[[package]] -name = "mypy-boto3-glue" -version = "1.28.36" -description = "Type annotations for boto3.Glue 1.28.36 service generated with mypy-boto3-builder 7.18.0" -category = "main" -optional = true -python-versions = ">=3.7" - -[package.dependencies] -typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} - -[[package]] -name = "mypy-boto3-lakeformation" -version = "1.28.36" -description = "Type annotations for boto3.LakeFormation 1.28.36 service generated with mypy-boto3-builder 7.18.0" -category = "main" -optional = true -python-versions = ">=3.7" - -[package.dependencies] -typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} - -[[package]] -name = "mypy-boto3-sts" -version = "1.28.37" -description = "Type annotations for boto3.STS 1.28.37 service generated with mypy-boto3-builder 7.18.2" -category = "main" -optional = true -python-versions = ">=3.7" - -[package.dependencies] -typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} - -[[package]] -name = "mypy-extensions" -version = "1.0.0" -description = "Type system extensions for programs checked with the mypy type checker." -category = "dev" -optional = false -python-versions = ">=3.5" - -[[package]] -name = "natsort" -version = "8.4.0" -description = "Simple yet flexible natural sorting in Python." 
-category = "dev" -optional = false -python-versions = ">=3.7" - -[package.extras] -fast = ["fastnumbers (>=2.0.0)"] -icu = ["PyICU (>=1.0.0)"] - -[[package]] -name = "networkx" -version = "2.8.8" -description = "Python package for creating and manipulating graphs and networks" -category = "main" -optional = false -python-versions = ">=3.8" - -[package.extras] -default = ["matplotlib (>=3.4)", "numpy (>=1.19)", "pandas (>=1.3)", "scipy (>=1.8)"] -developer = ["mypy (>=0.982)", "pre-commit (>=2.20)"] -doc = ["nb2plots (>=0.6)", "numpydoc (>=1.5)", "pillow (>=9.2)", "pydata-sphinx-theme (>=0.11)", "sphinx (>=5.2)", "sphinx-gallery (>=0.11)", "texext (>=0.6.6)"] -extra = ["lxml (>=4.6)", "pydot (>=1.4.2)", "pygraphviz (>=1.9)", "sympy (>=1.10)"] -test = ["codecov (>=2.1)", "pytest (>=7.2)", "pytest-cov (>=4.0)"] - -[[package]] -name = "nr-date" -version = "2.1.0" -description = "" -category = "dev" -optional = false -python-versions = ">=3.6,<4.0" - -[[package]] -name = "nr-stream" -version = "1.1.5" -description = "" -category = "dev" -optional = false -python-versions = ">=3.6,<4.0" - -[[package]] -name = "nr-util" -version = "0.8.12" -description = "General purpose Python utility library." -category = "dev" -optional = false -python-versions = ">=3.7,<4.0" - -[package.dependencies] -deprecated = ">=1.2.0,<2.0.0" -typing-extensions = ">=3.0.0" - -[[package]] -name = "numpy" -version = "1.24.4" -description = "Fundamental package for array computing in Python" -category = "main" -optional = false -python-versions = ">=3.8" - -[[package]] -name = "numpy" -version = "1.26.1" -description = "Fundamental package for array computing in Python" -category = "main" -optional = false -python-versions = "<3.13,>=3.9" - -[[package]] -name = "oauthlib" -version = "3.2.2" -description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" -category = "main" -optional = false -python-versions = ">=3.6" - -[package.extras] -rsa = ["cryptography (>=3.0.0)"] -signals = ["blinker (>=1.4.0)"] -signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] - -[[package]] -name = "onnx" -version = "1.15.0" -description = "Open Neural Network Exchange" -category = "main" -optional = true -python-versions = ">=3.8" - -[package.dependencies] -numpy = "*" -protobuf = ">=3.20.2" - -[package.extras] -reference = ["Pillow", "google-re2"] - -[[package]] -name = "onnxruntime" -version = "1.16.1" -description = "ONNX Runtime is a runtime accelerator for Machine Learning models" -category = "main" -optional = true -python-versions = "*" - -[package.dependencies] -coloredlogs = "*" -flatbuffers = "*" -numpy = ">=1.21.6" -packaging = "*" -protobuf = "*" -sympy = "*" - -[[package]] -name = "openpyxl" -version = "3.1.2" -description = "A Python library to read/write Excel 2010 xlsx/xlsm files" -category = "main" -optional = true -python-versions = ">=3.6" - -[package.dependencies] -et-xmlfile = "*" - -[[package]] -name = "opentelemetry-api" -version = "1.15.0" -description = "OpenTelemetry Python API" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -deprecated = ">=1.2.6" -setuptools = ">=16.0" - -[[package]] -name = "opentelemetry-exporter-otlp" -version = "1.15.0" -description = "OpenTelemetry Collector Exporters" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -opentelemetry-exporter-otlp-proto-grpc = "1.15.0" -opentelemetry-exporter-otlp-proto-http = "1.15.0" - -[[package]] -name = 
"opentelemetry-exporter-otlp-proto-grpc" -version = "1.15.0" -description = "OpenTelemetry Collector Protobuf over gRPC Exporter" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -backoff = {version = ">=1.10.0,<3.0.0", markers = "python_version >= \"3.7\""} -googleapis-common-protos = ">=1.52,<2.0" -grpcio = ">=1.0.0,<2.0.0" -opentelemetry-api = ">=1.12,<2.0" -opentelemetry-proto = "1.15.0" -opentelemetry-sdk = ">=1.12,<2.0" - -[package.extras] -test = ["pytest-grpc"] - -[[package]] -name = "opentelemetry-exporter-otlp-proto-http" -version = "1.15.0" -description = "OpenTelemetry Collector Protobuf over HTTP Exporter" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -backoff = {version = ">=1.10.0,<3.0.0", markers = "python_version >= \"3.7\""} -googleapis-common-protos = ">=1.52,<2.0" -opentelemetry-api = ">=1.12,<2.0" -opentelemetry-proto = "1.15.0" -opentelemetry-sdk = ">=1.12,<2.0" -requests = ">=2.7,<3.0" - -[package.extras] -test = ["responses (==0.22.0)"] - -[[package]] -name = "opentelemetry-proto" -version = "1.15.0" -description = "OpenTelemetry Python Proto" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -protobuf = ">=3.19,<5.0" - -[[package]] -name = "opentelemetry-sdk" -version = "1.15.0" -description = "OpenTelemetry Python SDK" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -opentelemetry-api = "1.15.0" -opentelemetry-semantic-conventions = "0.36b0" -setuptools = ">=16.0" -typing-extensions = ">=3.7.4" - -[[package]] -name = "opentelemetry-semantic-conventions" -version = "0.36b0" -description = "OpenTelemetry Semantic Conventions" -category = "dev" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "ordered-set" -version = "4.1.0" -description = "An OrderedSet is a custom MutableSet that remembers its order, so that every" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.extras] -dev = ["black", "mypy", "pytest"] - -[[package]] -name = "orjson" -version = "3.9.5" -description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" -category = "main" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "packaging" -version = "23.1" -description = "Core utilities for Python packages" -category = "main" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "pandas" -version = "2.0.3" -description = "Powerful data structures for data analysis, time series, and statistics" -category = "main" -optional = false -python-versions = ">=3.8" - -[package.dependencies] -numpy = [ - {version = ">=1.20.3", markers = "python_version < \"3.10\""}, - {version = ">=1.21.0", markers = "python_version >= \"3.10\""}, - {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, -] -python-dateutil = ">=2.8.2" -pytz = ">=2020.1" -tzdata = ">=2022.1" - -[package.extras] -all = ["PyQt5 (>=5.15.1)", "SQLAlchemy (>=1.4.16)", "beautifulsoup4 (>=4.9.3)", "bottleneck (>=1.3.2)", "brotlipy (>=0.7.0)", "fastparquet (>=0.6.3)", "fsspec (>=2021.07.0)", "gcsfs (>=2021.07.0)", "html5lib (>=1.1)", "hypothesis (>=6.34.2)", "jinja2 (>=3.0.0)", "lxml (>=4.6.3)", "matplotlib (>=3.6.1)", "numba (>=0.53.1)", "numexpr (>=2.7.3)", "odfpy (>=1.4.1)", "openpyxl (>=3.0.7)", "pandas-gbq (>=0.15.0)", "psycopg2 (>=2.8.6)", "pyarrow (>=7.0.0)", "pymysql (>=1.0.2)", "pyreadstat (>=1.1.2)", "pytest (>=7.3.2)", "pytest-asyncio (>=0.17.0)", "pytest-xdist 
(>=2.2.0)", "python-snappy (>=0.6.0)", "pyxlsb (>=1.0.8)", "qtpy (>=2.2.0)", "s3fs (>=2021.08.0)", "scipy (>=1.7.1)", "tables (>=3.6.1)", "tabulate (>=0.8.9)", "xarray (>=0.21.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=1.4.3)", "zstandard (>=0.15.2)"] -aws = ["s3fs (>=2021.08.0)"] -clipboard = ["PyQt5 (>=5.15.1)", "qtpy (>=2.2.0)"] -compression = ["brotlipy (>=0.7.0)", "python-snappy (>=0.6.0)", "zstandard (>=0.15.2)"] -computation = ["scipy (>=1.7.1)", "xarray (>=0.21.0)"] -excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.0.7)", "pyxlsb (>=1.0.8)", "xlrd (>=2.0.1)", "xlsxwriter (>=1.4.3)"] -feather = ["pyarrow (>=7.0.0)"] -fss = ["fsspec (>=2021.07.0)"] -gcp = ["gcsfs (>=2021.07.0)", "pandas-gbq (>=0.15.0)"] -hdf5 = ["tables (>=3.6.1)"] -html = ["beautifulsoup4 (>=4.9.3)", "html5lib (>=1.1)", "lxml (>=4.6.3)"] -mysql = ["SQLAlchemy (>=1.4.16)", "pymysql (>=1.0.2)"] -output-formatting = ["jinja2 (>=3.0.0)", "tabulate (>=0.8.9)"] -parquet = ["pyarrow (>=7.0.0)"] -performance = ["bottleneck (>=1.3.2)", "numba (>=0.53.1)", "numexpr (>=2.7.1)"] -plot = ["matplotlib (>=3.6.1)"] -postgresql = ["SQLAlchemy (>=1.4.16)", "psycopg2 (>=2.8.6)"] -spss = ["pyreadstat (>=1.1.2)"] -sql-other = ["SQLAlchemy (>=1.4.16)"] -test = ["hypothesis (>=6.34.2)", "pytest (>=7.3.2)", "pytest-asyncio (>=0.17.0)", "pytest-xdist (>=2.2.0)"] -xml = ["lxml (>=4.6.3)"] - -[[package]] -name = "pandas" -version = "2.2.0" -description = "Powerful data structures for data analysis, time series, and statistics" -category = "main" -optional = false -python-versions = ">=3.9" - -[package.dependencies] -numpy = {version = ">=1.26.0,<2", markers = "python_version >= \"3.12\""} -python-dateutil = ">=2.8.2" -pytz = ">=2020.1" -tzdata = ">=2022.7" - -[package.extras] -all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"] -aws = ["s3fs (>=2022.11.0)"] -clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"] -compression = ["zstandard (>=0.19.0)"] -computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"] -consortium-standard = ["dataframe-api-compat (>=0.1.7)"] -excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] -feather = ["pyarrow (>=10.0.1)"] -fss = ["fsspec (>=2022.11.0)"] -gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"] -hdf5 = ["tables (>=3.8.0)"] -html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"] -mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] -output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] -parquet = ["pyarrow (>=10.0.1)"] -performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] -plot = ["matplotlib (>=3.6.3)"] -postgresql = ["SQLAlchemy (>=2.0.0)", 
"adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] -spss = ["pyreadstat (>=1.2.0)"] -sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] -test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] -xml = ["lxml (>=4.9.2)"] - -[[package]] -name = "parsedatetime" -version = "2.4" -description = "Parse human-readable date/time text." -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -future = "*" - -[[package]] -name = "pathspec" -version = "0.11.2" -description = "Utility library for gitignore style pattern matching of file paths." -category = "main" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "pathvalidate" -version = "3.1.0" -description = "pathvalidate is a Python library to sanitize/validate a string such as filenames/file-paths/etc." -category = "main" -optional = false -python-versions = ">=3.7" - -[package.extras] -docs = ["Sphinx (>=2.4)", "sphinx-rtd-theme (>=1.2.2)", "urllib3 (<2)"] -test = ["Faker (>=1.0.8)", "allpairspy (>=2)", "click (>=6.2)", "pytest (>=6.0.1)", "pytest-discord (>=0.1.2)", "pytest-md-report (>=0.3)"] - -[[package]] -name = "pbr" -version = "5.11.1" -description = "Python Build Reasonableness" -category = "dev" -optional = false -python-versions = ">=2.6" - -[[package]] -name = "pendulum" -version = "3.0.0" -description = "Python datetimes made easy" -category = "main" -optional = false -python-versions = ">=3.8" - -[package.dependencies] -"backports.zoneinfo" = {version = ">=0.2.1", markers = "python_version < \"3.9\""} -importlib-resources = {version = ">=5.9.0", markers = "python_version < \"3.9\""} -python-dateutil = ">=2.6" -tzdata = ">=2020.1" - -[package.extras] -test = ["time-machine (>=2.6.0)"] - -[[package]] -name = "pipdeptree" -version = "2.9.6" -description = "Command line utility to show dependency tree of packages." -category = "main" -optional = true -python-versions = ">=3.7" - -[package.extras] -graphviz = ["graphviz (>=0.20.1)"] -test = ["covdefaults (>=2.3)", "diff-cover (>=7.6)", "pip (>=23.1.2)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)", "virtualenv (>=20.23.1,<21)"] - -[[package]] -name = "pkgutil-resolve-name" -version = "1.3.10" -description = "Resolve a name to an object." -category = "main" -optional = false -python-versions = ">=3.6" - -[[package]] -name = "platformdirs" -version = "3.8.1" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
-category = "main" -optional = false -python-versions = ">=3.7" - -[package.extras] -docs = ["furo (>=2023.5.20)", "proselint (>=0.13)", "sphinx (>=7.0.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest-cov (>=4.1)", "pytest-mock (>=3.10)"] - -[[package]] -name = "pluggy" -version = "1.3.0" -description = "plugin and hook calling mechanisms for python" -category = "dev" -optional = false -python-versions = ">=3.8" - -[package.extras] -dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] - -[[package]] -name = "ply" -version = "3.11" -description = "Python Lex & Yacc" -category = "main" -optional = false -python-versions = "*" - -[[package]] -name = "portalocker" -version = "2.7.0" -description = "Wraps the portalocker recipe for easy usage" -category = "main" -optional = true -python-versions = ">=3.5" - -[package.dependencies] -pywin32 = {version = ">=226", markers = "platform_system == \"Windows\""} - -[package.extras] -docs = ["sphinx (>=1.7.1)"] -redis = ["redis"] -tests = ["pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "pytest-mypy (>=0.8.0)", "pytest-timeout (>=2.1.0)", "redis", "sphinx (>=6.0.0)"] - -[[package]] -name = "prefixed" -version = "0.7.0" -description = "Prefixed alternative numeric library" -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "prison" -version = "0.2.1" -description = "Rison encoder/decoder" -category = "dev" -optional = false -python-versions = "*" - -[package.dependencies] -six = "*" - -[package.extras] -dev = ["nose", "pipreqs", "twine"] - -[[package]] -name = "proto-plus" -version = "1.22.3" -description = "Beautiful, Pythonic protocol buffers." -category = "main" -optional = true -python-versions = ">=3.6" - -[package.dependencies] -protobuf = ">=3.19.0,<5.0.0dev" - -[package.extras] -testing = ["google-api-core[grpc] (>=1.31.5)"] - -[[package]] -name = "protobuf" -version = "4.24.2" -description = "" -category = "main" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "psutil" -version = "5.9.5" -description = "Cross-platform lib for process and system monitoring in Python." -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - -[package.extras] -test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] - -[[package]] -name = "psycopg2-binary" -version = "2.9.7" -description = "psycopg2 - Python-PostgreSQL Database Adapter" -category = "main" -optional = true -python-versions = ">=3.6" - -[[package]] -name = "psycopg2cffi" -version = "2.9.0" -description = ".. 
image:: https://travis-ci.org/chtd/psycopg2cffi.svg?branch=master" -category = "main" -optional = true -python-versions = "*" - -[package.dependencies] -cffi = ">=1.0" -six = "*" - -[[package]] -name = "py" -version = "1.11.0" -description = "library with cross-python path, ini-parsing, io, code, log facilities" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" - -[[package]] -name = "pyarrow" -version = "14.0.1" -description = "Python library for Apache Arrow" -category = "main" -optional = true -python-versions = ">=3.8" - -[package.dependencies] -numpy = ">=1.16.6" - -[[package]] -name = "pyasn1" -version = "0.5.0" -description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" -category = "main" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" - -[[package]] -name = "pyasn1-modules" -version = "0.3.0" -description = "A collection of ASN.1-based protocols modules" -category = "main" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" - -[package.dependencies] -pyasn1 = ">=0.4.6,<0.6.0" - -[[package]] -name = "pyathena" -version = "3.0.6" -description = "Python DB API 2.0 (PEP 249) client for Amazon Athena" -category = "main" -optional = true -python-versions = ">=3.8.1" - -[package.dependencies] -boto3 = ">=1.26.4" -botocore = ">=1.29.4" -fsspec = "*" -tenacity = ">=4.1.0" - -[package.extras] -arrow = ["pyarrow (>=7.0.0)"] -fastparquet = ["fastparquet (>=0.4.0)"] -pandas = ["pandas (>=1.3.0)"] -sqlalchemy = ["sqlalchemy (>=1.0.0)"] - -[[package]] -name = "pycodestyle" -version = "2.9.1" -description = "Python style guide checker" -category = "dev" -optional = false -python-versions = ">=3.6" - -[[package]] -name = "pycparser" -version = "2.21" -description = "C parser in Python" -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - -[[package]] -name = "pydantic" -version = "2.5.0" -description = "Data validation using Python type hints" -category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -annotated-types = ">=0.4.0" -pydantic-core = "2.14.1" -typing-extensions = ">=4.6.1" - -[package.extras] -email = ["email-validator (>=2.0.0)"] - -[[package]] -name = "pydantic-core" -version = "2.14.1" -description = "" -category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" - -[[package]] -name = "pydoc-markdown" -version = "4.8.2" -description = "Create Python API documentation in Markdown format." -category = "dev" -optional = false -python-versions = ">=3.7,<4.0" - -[package.dependencies] -click = ">=7.1,<9.0" -"databind.core" = ">=4.4.0,<5.0.0" -"databind.json" = ">=4.4.0,<5.0.0" -docspec = ">=2.2.1,<3.0.0" -docspec-python = ">=2.2.1,<3.0.0" -docstring-parser = ">=0.11,<0.12" -jinja2 = ">=3.0.0,<4.0.0" -"nr.util" = ">=0.7.5,<1.0.0" -PyYAML = ">=5.0,<7.0" -requests = ">=2.23.0,<3.0.0" -tomli = ">=2.0.0,<3.0.0" -tomli_w = ">=1.0.0,<2.0.0" -watchdog = "*" -yapf = ">=0.30.0" - -[[package]] -name = "pyflakes" -version = "2.5.0" -description = "passive checker of Python programs" -category = "dev" -optional = false -python-versions = ">=3.6" - -[[package]] -name = "pygments" -version = "2.16.1" -description = "Pygments is a syntax highlighting package written in Python." 
-category = "dev" -optional = false -python-versions = ">=3.7" - -[package.extras] -plugins = ["importlib-metadata"] - -[[package]] -name = "pyjwt" -version = "2.8.0" -description = "JSON Web Token implementation in Python" -category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -cryptography = {version = ">=3.4.0", optional = true, markers = "extra == \"crypto\""} - -[package.extras] -crypto = ["cryptography (>=3.4.0)"] -dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] -docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] -tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] - -[[package]] -name = "pymongo" -version = "4.6.0" -description = "Python driver for MongoDB " -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -dnspython = ">=1.16.0,<3.0.0" - -[package.extras] -aws = ["pymongo-auth-aws (<2.0.0)"] -encryption = ["certifi", "pymongo[aws]", "pymongocrypt (>=1.6.0,<2.0.0)"] -gssapi = ["pykerberos", "winkerberos (>=0.5.0)"] -ocsp = ["certifi", "cryptography (>=2.5)", "pyopenssl (>=17.2.0)", "requests (<3.0.0)", "service-identity (>=18.1.0)"] -snappy = ["python-snappy"] -test = ["pytest (>=7)"] -zstd = ["zstandard"] - -[[package]] -name = "pymysql" -version = "1.1.0" -description = "Pure Python MySQL Driver" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.extras] -ed25519 = ["PyNaCl (>=1.4.0)"] -rsa = ["cryptography"] - -[[package]] -name = "pyodbc" -version = "4.0.39" -description = "DB API Module for ODBC" -category = "main" -optional = true -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" - -[[package]] -name = "pyopenssl" -version = "23.2.0" -description = "Python wrapper module around the OpenSSL library" -category = "main" -optional = true -python-versions = ">=3.6" - -[package.dependencies] -cryptography = ">=38.0.0,<40.0.0 || >40.0.0,<40.0.1 || >40.0.1,<42" - -[package.extras] -docs = ["sphinx (!=5.2.0,!=5.2.0.post0)", "sphinx-rtd-theme"] -test = ["flaky", "pretend", "pytest (>=3.0.1)"] - -[[package]] -name = "pyparsing" -version = "3.1.1" -description = "pyparsing module - Classes and methods to define and execute parsing grammars" -category = "dev" -optional = false -python-versions = ">=3.6.8" - -[package.extras] -diagrams = ["jinja2", "railroad-diagrams"] - -[[package]] -name = "pypdf2" -version = "3.0.1" -description = "A pure-python PDF library capable of splitting, merging, cropping, and transforming PDF files" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -typing_extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} - -[package.extras] -crypto = ["PyCryptodome"] -dev = ["black", "flit", "pip-tools", "pre-commit (<2.18.0)", "pytest-cov", "wheel"] -docs = ["myst_parser", "sphinx", "sphinx_rtd_theme"] -full = ["Pillow", "PyCryptodome"] -image = ["Pillow"] - -[[package]] -name = "pyreadline3" -version = "3.4.1" -description = "A python implementation of GNU readline." 
-category = "main" -optional = true -python-versions = "*" - -[[package]] -name = "pytest" -version = "7.4.4" -description = "pytest: simple powerful testing with Python" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -colorama = {version = "*", markers = "sys_platform == \"win32\""} -exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} -iniconfig = "*" -packaging = "*" -pluggy = ">=0.12,<2.0" -tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} - -[package.extras] -testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] - -[[package]] -name = "pytest-asyncio" -version = "0.23.5" -description = "Pytest support for asyncio" -category = "dev" -optional = false -python-versions = ">=3.8" - -[package.dependencies] -pytest = ">=7.0.0,<9" - -[package.extras] -docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] -testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] - -[[package]] -name = "pytest-cases" -version = "3.6.14" -description = "Separate test code from test cases in pytest." -category = "dev" -optional = false -python-versions = "*" - -[package.dependencies] -decopatch = "*" -makefun = ">=1.9.5" - -[[package]] -name = "pytest-console-scripts" -version = "1.4.1" -description = "Pytest plugin for testing console scripts" -category = "dev" -optional = false -python-versions = ">=3.8" - -[package.dependencies] -importlib-metadata = {version = ">=3.6", markers = "python_version < \"3.10\""} -pytest = ">=4.0.0" - -[[package]] -name = "pytest-forked" -version = "1.6.0" -description = "run tests in isolated forked subprocesses" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -py = "*" -pytest = ">=3.10" - -[[package]] -name = "pytest-order" -version = "1.1.0" -description = "pytest plugin to run your tests in a specific order" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -pytest = [ - {version = ">=5.0", markers = "python_version < \"3.10\""}, - {version = ">=6.2.4", markers = "python_version >= \"3.10\""}, -] - -[[package]] -name = "python-daemon" -version = "3.0.1" -description = "Library to implement a well-behaved Unix daemon process." 
-category = "dev" -optional = false -python-versions = ">=3" - -[package.dependencies] -docutils = "*" -lockfile = ">=0.10" -setuptools = ">=62.4.0" - -[package.extras] -devel = ["coverage", "docutils", "isort", "testscenarios (>=0.4)", "testtools", "twine"] -test = ["coverage", "docutils", "testscenarios (>=0.4)", "testtools"] - -[[package]] -name = "python-dateutil" -version = "2.8.2" -description = "Extensions to the standard Python datetime module" -category = "main" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" - -[package.dependencies] -six = ">=1.5" - -[[package]] -name = "python-nvd3" -version = "0.15.0" -description = "Python NVD3 - Chart Library for d3.js" -category = "dev" -optional = false -python-versions = "*" - -[package.dependencies] -Jinja2 = ">=2.8" -python-slugify = ">=1.2.5" - -[[package]] -name = "python-slugify" -version = "8.0.1" -description = "A Python slugify application that also handles Unicode" -category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -text-unidecode = ">=1.3" - -[package.extras] -unidecode = ["Unidecode (>=1.1.1)"] - -[[package]] -name = "pytimeparse" -version = "1.1.8" -description = "Time expression parser" -category = "main" -optional = false -python-versions = "*" - -[[package]] -name = "pytz" -version = "2023.3" -description = "World timezone definitions, modern and historical" -category = "main" -optional = false -python-versions = "*" - -[[package]] -name = "pywin32" -version = "306" -description = "Python for Window Extensions" -category = "main" -optional = true -python-versions = "*" - -[[package]] -name = "pywin32-ctypes" -version = "0.2.2" -description = "A (partial) reimplementation of pywin32 using ctypes/cffi" -category = "main" -optional = true -python-versions = ">=3.6" - -[[package]] -name = "pyyaml" -version = "6.0.1" -description = "YAML parser and emitter for Python" -category = "main" -optional = false -python-versions = ">=3.6" - -[[package]] -name = "qdrant-client" -version = "1.6.4" -description = "Client library for the Qdrant vector search engine" -category = "main" -optional = true -python-versions = ">=3.8,<3.13" - -[package.dependencies] -fastembed = {version = "0.1.1", optional = true, markers = "python_version < \"3.12\" and extra == \"fastembed\""} -grpcio = ">=1.41.0" -grpcio-tools = ">=1.41.0" -httpx = {version = ">=0.14.0", extras = ["http2"]} -numpy = [ - {version = ">=1.21", markers = "python_version >= \"3.8\" and python_version < \"3.12\""}, - {version = ">=1.26", markers = "python_version >= \"3.12\""}, -] -portalocker = ">=2.7.0,<3.0.0" -pydantic = ">=1.10.8" -urllib3 = ">=1.26.14,<2.0.0" - -[package.extras] -fastembed = ["fastembed (==0.1.1)"] - -[[package]] -name = "redshift-connector" -version = "2.0.915" -description = "Redshift interface library" -category = "main" -optional = true -python-versions = ">=3.6" - -[package.dependencies] -beautifulsoup4 = ">=4.7.0,<5.0.0" -boto3 = ">=1.9.201,<2.0.0" -botocore = ">=1.12.201,<2.0.0" -lxml = ">=4.6.5" -packaging = "*" -pytz = ">=2020.1" -requests = ">=2.23.0,<3.0.0" -scramp = ">=1.2.0,<1.5.0" -setuptools = "*" - -[package.extras] -full = ["numpy", "pandas"] - -[[package]] -name = "referencing" -version = "0.30.2" -description = "JSON Referencing + Python" -category = "main" -optional = false -python-versions = ">=3.8" - -[package.dependencies] -attrs = ">=22.2.0" -rpds-py = ">=0.7.0" - -[[package]] -name = "regex" -version = "2023.12.25" -description = "Alternative regular expression module, to 
replace re." -category = "dev" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "requests" -version = "2.31.0" -description = "Python HTTP for Humans." -category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -certifi = ">=2017.4.17" -charset-normalizer = ">=2,<4" -idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<3" - -[package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] - -[[package]] -name = "requests-mock" -version = "1.11.0" -description = "Mock out responses from the requests package" -category = "dev" -optional = false -python-versions = "*" - -[package.dependencies] -requests = ">=2.3,<3" -six = "*" - -[package.extras] -fixture = ["fixtures"] -test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "testtools"] - -[[package]] -name = "requests-oauthlib" -version = "1.3.1" -description = "OAuthlib authentication support for Requests." -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - -[package.dependencies] -oauthlib = ">=3.0.0" -requests = ">=2.0.0" - -[package.extras] -rsa = ["oauthlib[signedtoken] (>=3.0.0)"] - -[[package]] -name = "requests-toolbelt" -version = "1.0.0" -description = "A utility belt for advanced users of python-requests" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - -[package.dependencies] -requests = ">=2.0.1,<3.0.0" - -[[package]] -name = "requirements-parser" -version = "0.5.0" -description = "This is a small Python module for parsing Pip requirement files." -category = "main" -optional = false -python-versions = ">=3.6,<4.0" - -[package.dependencies] -types-setuptools = ">=57.0.0" - -[[package]] -name = "rfc3339-validator" -version = "0.1.4" -description = "A pure python RFC3339 validator" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" - -[package.dependencies] -six = "*" - -[[package]] -name = "rich" -version = "13.5.2" -description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" -category = "dev" -optional = false -python-versions = ">=3.7.0" - -[package.dependencies] -markdown-it-py = ">=2.2.0" -pygments = ">=2.13.0,<3.0.0" -typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.9\""} - -[package.extras] -jupyter = ["ipywidgets (>=7.5.1,<9)"] - -[[package]] -name = "rich-argparse" -version = "1.3.0" -description = "Rich help formatters for argparse and optparse" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -rich = ">=11.0.0" - -[[package]] -name = "rpds-py" -version = "0.10.0" -description = "Python bindings to Rust's persistent data structures (rpds)" -category = "main" -optional = false -python-versions = ">=3.8" - -[[package]] -name = "rsa" -version = "4.9" -description = "Pure-Python RSA implementation" -category = "main" -optional = false -python-versions = ">=3.6,<4" - -[package.dependencies] -pyasn1 = ">=0.1.3" - -[[package]] -name = "ruff" -version = "0.3.2" -description = "An extremely fast Python linter and code formatter, written in Rust." 
-category = "dev" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "s3fs" -version = "2024.2.0" -description = "Convenient Filesystem interface over S3" -category = "main" -optional = true -python-versions = ">= 3.8" - -[package.dependencies] -aiobotocore = ">=2.5.4,<3.0.0" -aiohttp = "<4.0.0a0 || >4.0.0a0,<4.0.0a1 || >4.0.0a1" -fsspec = "2024.2.0" - -[package.extras] -awscli = ["aiobotocore[awscli] (>=2.5.4,<3.0.0)"] -boto3 = ["aiobotocore[boto3] (>=2.5.4,<3.0.0)"] - -[[package]] -name = "s3transfer" -version = "0.10.0" -description = "An Amazon S3 Transfer Manager" -category = "main" -optional = true -python-versions = ">= 3.8" - -[package.dependencies] -botocore = ">=1.33.2,<2.0a.0" - -[package.extras] -crt = ["botocore[crt] (>=1.33.2,<2.0a.0)"] - -[[package]] -name = "scramp" -version = "1.4.4" -description = "An implementation of the SCRAM protocol." -category = "main" -optional = true -python-versions = ">=3.7" - -[package.dependencies] -asn1crypto = ">=1.5.1" - -[[package]] -name = "secretstorage" -version = "3.3.3" -description = "Python bindings to FreeDesktop.org Secret Service API" -category = "main" -optional = true -python-versions = ">=3.6" - -[package.dependencies] -cryptography = ">=2.0" -jeepney = ">=0.6" - -[[package]] -name = "semver" -version = "3.0.1" -description = "Python helper for Semantic Versioning (https://semver.org)" -category = "main" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "sentry-sdk" -version = "1.30.0" -description = "Python client for Sentry (https://sentry.io)" -category = "dev" -optional = false -python-versions = "*" - -[package.dependencies] -certifi = "*" -urllib3 = {version = ">=1.26.11", markers = "python_version >= \"3.6\""} - -[package.extras] -aiohttp = ["aiohttp (>=3.5)"] -arq = ["arq (>=0.23)"] -beam = ["apache-beam (>=2.12)"] -bottle = ["bottle (>=0.12.13)"] -celery = ["celery (>=3)"] -chalice = ["chalice (>=1.16.0)"] -django = ["django (>=1.8)"] -falcon = ["falcon (>=1.4)"] -fastapi = ["fastapi (>=0.79.0)"] -flask = ["blinker (>=1.1)", "flask (>=0.11)", "markupsafe"] -grpcio = ["grpcio (>=1.21.1)"] -httpx = ["httpx (>=0.16.0)"] -huey = ["huey (>=2)"] -loguru = ["loguru (>=0.5)"] -opentelemetry = ["opentelemetry-distro (>=0.35b0)"] -opentelemetry-experimental = ["opentelemetry-distro (>=0.40b0,<1.0)", "opentelemetry-instrumentation-aiohttp-client (>=0.40b0,<1.0)", "opentelemetry-instrumentation-django (>=0.40b0,<1.0)", "opentelemetry-instrumentation-fastapi (>=0.40b0,<1.0)", "opentelemetry-instrumentation-flask (>=0.40b0,<1.0)", "opentelemetry-instrumentation-requests (>=0.40b0,<1.0)", "opentelemetry-instrumentation-sqlite3 (>=0.40b0,<1.0)", "opentelemetry-instrumentation-urllib (>=0.40b0,<1.0)"] -pure-eval = ["asttokens", "executing", "pure-eval"] -pymongo = ["pymongo (>=3.1)"] -pyspark = ["pyspark (>=2.4.4)"] -quart = ["blinker (>=1.1)", "quart (>=0.16.1)"] -rq = ["rq (>=0.6)"] -sanic = ["sanic (>=0.8)"] -sqlalchemy = ["sqlalchemy (>=1.2)"] -starlette = ["starlette (>=0.19.1)"] -starlite = ["starlite (>=1.48)"] -tornado = ["tornado (>=5)"] - -[[package]] -name = "setproctitle" -version = "1.3.2" -description = "A Python module to customize the process title" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.extras] -test = ["pytest"] - -[[package]] -name = "setuptools" -version = "68.1.2" -description = "Easily download, build, install, upgrade, and uninstall Python packages" -category = "main" -optional = false -python-versions = ">=3.8" - -[package.extras] -docs = 
["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5,<=7.1.2)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] - -[[package]] -name = "simplejson" -version = "3.19.1" -description = "Simple, fast, extensible JSON encoder/decoder for Python" -category = "main" -optional = false -python-versions = ">=2.5, !=3.0.*, !=3.1.*, !=3.2.*" - -[[package]] -name = "six" -version = "1.16.0" -description = "Python 2 and 3 compatibility utilities" -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" - -[[package]] -name = "smmap" -version = "5.0.0" -description = "A pure Python implementation of a sliding window memory map manager" -category = "main" -optional = false -python-versions = ">=3.6" - -[[package]] -name = "sniffio" -version = "1.3.0" -description = "Sniff out which async library your code is running under" -category = "main" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "snowflake-connector-python" -version = "3.5.0" -description = "Snowflake Connector for Python" -category = "main" -optional = true -python-versions = ">=3.8" - -[package.dependencies] -asn1crypto = ">0.24.0,<2.0.0" -certifi = ">=2017.4.17" -cffi = ">=1.9,<2.0.0" -charset-normalizer = ">=2,<4" -cryptography = ">=3.1.0,<42.0.0" -filelock = ">=3.5,<4" -idna = ">=2.5,<4" -keyring = {version = "<16.1.0 || >16.1.0,<25.0.0", optional = true, markers = "extra == \"secure-local-storage\""} -packaging = "*" -platformdirs = ">=2.6.0,<4.0.0" -pyjwt = "<3.0.0" -pyOpenSSL = ">=16.2.0,<24.0.0" -pytz = "*" -requests = "<3.0.0" -sortedcontainers = ">=2.4.0" -tomlkit = "*" -typing-extensions = ">=4.3,<5" -urllib3 = ">=1.21.1,<2.0.0" - -[package.extras] -development = ["Cython", "coverage", "more-itertools", "numpy (<1.27.0)", "pendulum (!=2.1.1)", "pexpect", "pytest (<7.5.0)", "pytest-cov", "pytest-rerunfailures", "pytest-timeout", "pytest-xdist", "pytzdata"] -pandas = ["pandas (>=1.0.0,<2.1.0)", "pyarrow"] -secure-local-storage = ["keyring (!=16.1.0,<25.0.0)"] - -[[package]] -name = "sortedcontainers" -version = "2.4.0" -description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set" -category = "main" -optional = true -python-versions = "*" - -[[package]] -name = "soupsieve" -version = "2.5" -description = "A modern CSS selector implementation for Beautiful Soup." 
-category = "main" -optional = true -python-versions = ">=3.8" - -[[package]] -name = "sqlalchemy" -version = "1.4.49" -description = "Database Abstraction Library" -category = "main" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" - -[package.dependencies] -greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} - -[package.extras] -aiomysql = ["aiomysql", "greenlet (!=0.4.17)"] -aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] -asyncio = ["greenlet (!=0.4.17)"] -asyncmy = ["asyncmy (>=0.2.3,!=0.2.4)", "greenlet (!=0.4.17)"] -mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)"] -mssql = ["pyodbc"] -mssql-pymssql = ["pymssql"] -mssql-pyodbc = ["pyodbc"] -mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"] -mysql = ["mysqlclient (>=1.4.0)", "mysqlclient (>=1.4.0,<2)"] -mysql-connector = ["mysql-connector-python"] -oracle = ["cx_oracle (>=7)", "cx_oracle (>=7,<8)"] -postgresql = ["psycopg2 (>=2.7)"] -postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] -postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"] -postgresql-psycopg2binary = ["psycopg2-binary"] -postgresql-psycopg2cffi = ["psycopg2cffi"] -pymysql = ["pymysql", "pymysql (<1)"] -sqlcipher = ["sqlcipher3_binary"] - -[[package]] -name = "sqlalchemy-jsonfield" -version = "1.0.1.post0" -description = "SQLALchemy JSONField implementation for storing dicts at SQL" -category = "dev" -optional = false -python-versions = ">=3.7.0" - -[package.dependencies] -sqlalchemy = "*" - -[[package]] -name = "sqlalchemy-utils" -version = "0.41.1" -description = "Various utility functions for SQLAlchemy." 
-category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -SQLAlchemy = ">=1.3" - -[package.extras] -arrow = ["arrow (>=0.3.4)"] -babel = ["Babel (>=1.3)"] -color = ["colour (>=0.0.4)"] -encrypted = ["cryptography (>=0.6)"] -intervals = ["intervals (>=0.7.1)"] -password = ["passlib (>=1.6,<2.0)"] -pendulum = ["pendulum (>=2.0.5)"] -phone = ["phonenumbers (>=5.9.2)"] -test = ["Jinja2 (>=2.3)", "Pygments (>=1.2)", "backports.zoneinfo", "docutils (>=0.10)", "flake8 (>=2.4.0)", "flexmock (>=0.9.7)", "isort (>=4.2.2)", "pg8000 (>=1.12.4)", "psycopg (>=3.1.8)", "psycopg2 (>=2.5.1)", "psycopg2cffi (>=2.8.1)", "pymysql", "pyodbc", "pytest (>=2.7.1)", "python-dateutil (>=2.6)", "pytz (>=2014.2)"] -test-all = ["Babel (>=1.3)", "Jinja2 (>=2.3)", "Pygments (>=1.2)", "arrow (>=0.3.4)", "backports.zoneinfo", "colour (>=0.0.4)", "cryptography (>=0.6)", "docutils (>=0.10)", "flake8 (>=2.4.0)", "flexmock (>=0.9.7)", "furl (>=0.4.1)", "intervals (>=0.7.1)", "isort (>=4.2.2)", "passlib (>=1.6,<2.0)", "pendulum (>=2.0.5)", "pg8000 (>=1.12.4)", "phonenumbers (>=5.9.2)", "psycopg (>=3.1.8)", "psycopg2 (>=2.5.1)", "psycopg2cffi (>=2.8.1)", "pymysql", "pyodbc", "pytest (>=2.7.1)", "python-dateutil", "python-dateutil (>=2.6)", "pytz (>=2014.2)"] -timezone = ["python-dateutil"] -url = ["furl (>=0.4.1)"] - -[[package]] -name = "sqlfluff" -version = "2.3.2" -description = "The SQL Linter for Humans" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -appdirs = "*" -chardet = "*" -click = "*" -colorama = ">=0.3" -diff-cover = ">=2.5.0" -importlib-resources = {version = "*", markers = "python_version < \"3.9\""} -Jinja2 = "*" -pathspec = "*" -pytest = "*" -pyyaml = ">=5.1" -regex = "*" -tblib = "*" -toml = {version = "*", markers = "python_version < \"3.11\""} -tqdm = "*" -typing-extensions = "*" - -[[package]] -name = "sqlparams" -version = "6.0.1" -description = "Convert between various DB API 2.0 parameter styles." -category = "main" -optional = true -python-versions = ">=3.8" - -[[package]] -name = "sqlparse" -version = "0.4.4" -description = "A non-validating SQL parser." -category = "main" -optional = false -python-versions = ">=3.5" - -[package.extras] -dev = ["build", "flake8"] -doc = ["sphinx"] -test = ["pytest", "pytest-cov"] - -[[package]] -name = "stevedore" -version = "5.1.0" -description = "Manage dynamic plugins for Python applications" -category = "dev" -optional = false -python-versions = ">=3.8" - -[package.dependencies] -pbr = ">=2.0.0,<2.1.0 || >2.1.0" - -[[package]] -name = "sympy" -version = "1.12" -description = "Computer algebra system (CAS) in Python" -category = "main" -optional = true -python-versions = ">=3.8" - -[package.dependencies] -mpmath = ">=0.19" - -[[package]] -name = "tabulate" -version = "0.9.0" -description = "Pretty-print tabular data" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.extras] -widechars = ["wcwidth"] - -[[package]] -name = "tblib" -version = "2.0.0" -description = "Traceback serialization library." 
-category = "dev" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "tenacity" -version = "8.2.3" -description = "Retry code until it succeeds" -category = "main" -optional = false -python-versions = ">=3.7" - -[package.extras] -doc = ["reno", "sphinx", "tornado (>=4.5)"] - -[[package]] -name = "termcolor" -version = "2.3.0" -description = "ANSI color formatting for output in terminal" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.extras] -tests = ["pytest", "pytest-cov"] - -[[package]] -name = "text-unidecode" -version = "1.3" -description = "The most basic Text::Unidecode port" -category = "main" -optional = false -python-versions = "*" - -[[package]] -name = "thrift" -version = "0.16.0" -description = "Python bindings for the Apache Thrift RPC system" -category = "main" -optional = true -python-versions = "*" - -[package.dependencies] -six = ">=1.7.2" - -[package.extras] -all = ["tornado (>=4.0)", "twisted"] -tornado = ["tornado (>=4.0)"] -twisted = ["twisted"] - -[[package]] -name = "tokenizers" -version = "0.13.3" -description = "Fast and Customizable Tokenizers" -category = "main" -optional = true -python-versions = "*" - -[package.extras] -dev = ["black (==22.3)", "datasets", "numpy", "pytest", "requests"] -docs = ["setuptools-rust", "sphinx", "sphinx-rtd-theme"] -testing = ["black (==22.3)", "datasets", "numpy", "pytest", "requests"] - -[[package]] -name = "toml" -version = "0.10.2" -description = "Python Library for Tom's Obvious, Minimal Language" -category = "dev" -optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" - -[[package]] -name = "tomli" -version = "2.0.1" -description = "A lil' TOML parser" -category = "dev" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "tomli-w" -version = "1.0.0" -description = "A lil' TOML writer" -category = "dev" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "tomlkit" -version = "0.12.1" -description = "Style preserving TOML library" -category = "main" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "tqdm" -version = "4.66.1" -description = "Fast, Extensible Progress Meter" -category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[package.extras] -dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] -notebook = ["ipywidgets (>=6)"] -slack = ["slack-sdk"] -telegram = ["requests"] - -[[package]] -name = "typeapi" -version = "2.1.1" -description = "" -category = "dev" -optional = false -python-versions = ">=3.6.3,<4.0.0" - -[package.dependencies] -typing-extensions = ">=3.0.0" - -[[package]] -name = "types-awscrt" -version = "0.19.1" -description = "Type annotations and code completion for awscrt" -category = "main" -optional = false -python-versions = ">=3.7,<4.0" - -[[package]] -name = "types-cachetools" -version = "5.3.0.6" -description = "Typing stubs for cachetools" -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "types-click" -version = "7.1.8" -description = "Typing stubs for click" -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "types-deprecated" -version = "1.2.9.3" -description = "Typing stubs for Deprecated" -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "types-protobuf" -version = "4.24.0.1" -description = "Typing stubs for protobuf" -category = "dev" -optional = false -python-versions 
= "*" - -[[package]] -name = "types-psutil" -version = "5.9.5.16" -description = "Typing stubs for psutil" -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "types-psycopg2" -version = "2.9.21.14" -description = "Typing stubs for psycopg2" -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "types-python-dateutil" -version = "2.8.19.14" -description = "Typing stubs for python-dateutil" -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "types-pyyaml" -version = "6.0.12.11" -description = "Typing stubs for PyYAML" -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "types-requests" -version = "2.31.0.2" -description = "Typing stubs for requests" -category = "dev" -optional = false -python-versions = "*" - -[package.dependencies] -types-urllib3 = "*" - -[[package]] -name = "types-s3transfer" -version = "0.6.2" -description = "Type annotations and code completion for s3transfer" -category = "main" -optional = false -python-versions = ">=3.7,<4.0" - -[[package]] -name = "types-setuptools" -version = "68.1.0.1" -description = "Typing stubs for setuptools" -category = "main" -optional = false -python-versions = "*" - -[[package]] -name = "types-simplejson" -version = "3.19.0.2" -description = "Typing stubs for simplejson" -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "types-sqlalchemy" -version = "1.4.53.38" -description = "Typing stubs for SQLAlchemy" -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "types-tqdm" -version = "4.66.0.2" -description = "Typing stubs for tqdm" -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "types-urllib3" -version = "1.26.25.14" -description = "Typing stubs for urllib3" -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "typing-extensions" -version = "4.7.1" -description = "Backported and Experimental Type Hints for Python 3.7+" -category = "main" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "tzdata" -version = "2023.3" -description = "Provider of IANA time zone data" -category = "main" -optional = false -python-versions = ">=2" - -[[package]] -name = "uc-micro-py" -version = "1.0.2" -description = "Micro subset of unicode data files for linkify-it-py projects." -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.extras] -test = ["coverage", "pytest", "pytest-cov"] - -[[package]] -name = "unicodecsv" -version = "0.14.1" -description = "Python2's stdlib csv module is nice, but it doesn't support unicode. This module is a drop-in replacement which *does*." 
-category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "universal-pathlib" -version = "0.2.1" -description = "pathlib api extended to use fsspec backends" -category = "dev" -optional = false -python-versions = ">=3.8" - -[package.dependencies] -fsspec = ">=2022.1.0" - -[package.extras] -dev = ["adlfs", "aiohttp", "cheroot", "gcsfs", "moto[s3,server] (<5)", "mypy (==1.8.0)", "packaging", "pydantic", "pydantic-settings", "pylint (==2.17.4)", "pytest (==8.0.0)", "pytest-cov (==4.1.0)", "pytest-mock (==3.12.0)", "pytest-sugar (==0.9.7)", "requests", "s3fs", "webdav4[fsspec]", "wsgidav"] -tests = ["mypy (==1.8.0)", "packaging", "pylint (==2.17.4)", "pytest (==8.0.0)", "pytest-cov (==4.1.0)", "pytest-mock (==3.12.0)", "pytest-sugar (==0.9.7)"] - -[[package]] -name = "uritemplate" -version = "4.1.1" -description = "Implementation of RFC 6570 URI Templates" -category = "dev" -optional = false -python-versions = ">=3.6" - -[[package]] -name = "urllib3" -version = "1.26.16" -description = "HTTP library with thread-safe connection pooling, file post, and more." -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" - -[package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] -socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] - -[[package]] -name = "validators" -version = "0.21.0" -description = "Python Data Validation for Humans™" -category = "main" -optional = true -python-versions = ">=3.8,<4.0" - -[[package]] -name = "watchdog" -version = "3.0.0" -description = "Filesystem events monitoring" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.extras] -watchmedo = ["PyYAML (>=3.10)"] - -[[package]] -name = "wcwidth" -version = "0.2.6" -description = "Measures the displayed width of unicode strings in a terminal" -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "weaviate-client" -version = "3.23.2" -description = "A python native Weaviate client" -category = "main" -optional = true -python-versions = ">=3.8" - -[package.dependencies] -authlib = ">=1.1.0" -requests = ">=2.28.0,<=2.31.0" -tqdm = ">=4.59.0,<5.0.0" -validators = ">=0.18.2,<=0.21.0" - -[package.extras] -grpc = ["grpcio", "grpcio-tools"] - -[[package]] -name = "werkzeug" -version = "2.3.7" -description = "The comprehensive WSGI web application library." -category = "dev" -optional = false -python-versions = ">=3.8" - -[package.dependencies] -MarkupSafe = ">=2.1.1" - -[package.extras] -watchdog = ["watchdog (>=2.3)"] - -[[package]] -name = "wheel" -version = "0.41.2" -description = "A built-package format for Python" -category = "main" -optional = false -python-versions = ">=3.7" - -[package.extras] -test = ["pytest (>=6.0.0)", "setuptools (>=65)"] - -[[package]] -name = "win-precise-time" -version = "1.4.2" -description = "" -category = "main" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "wrapt" -version = "1.15.0" -description = "Module for decorators, wrappers and monkey patching." -category = "main" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" - -[[package]] -name = "wtforms" -version = "3.0.1" -description = "Form validation and rendering for Python web development." 
-category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -MarkupSafe = "*" - -[package.extras] -email = ["email-validator"] - -[[package]] -name = "yapf" -version = "0.33.0" -description = "A formatter for Python code." -category = "dev" -optional = false -python-versions = "*" - -[package.dependencies] -tomli = ">=2.0.1" - -[[package]] -name = "yarl" -version = "1.9.2" -description = "Yet another URL library" -category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -idna = ">=2.0" -multidict = ">=4.0" - -[[package]] -name = "zipp" -version = "3.16.2" -description = "Backport of pathlib-compatible object wrapper for zip files" -category = "main" -optional = false -python-versions = ">=3.8" - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] - -[extras] -athena = ["pyathena", "pyarrow", "s3fs", "botocore"] -az = ["adlfs"] -bigquery = ["grpcio", "google-cloud-bigquery", "pyarrow", "gcsfs"] -cli = ["pipdeptree", "cron-descriptor"] -databricks = ["databricks-sql-connector"] -dbt = ["dbt-core", "dbt-redshift", "dbt-bigquery", "dbt-duckdb", "dbt-snowflake", "dbt-athena-community", "dbt-databricks"] -duckdb = ["duckdb", "duckdb"] -filesystem = ["s3fs", "botocore"] -gcp = ["grpcio", "google-cloud-bigquery", "gcsfs"] -gs = ["gcsfs"] -motherduck = ["duckdb", "duckdb", "pyarrow"] -mssql = ["pyodbc"] -parquet = ["pyarrow"] -postgres = ["psycopg2-binary", "psycopg2cffi"] -qdrant = ["qdrant-client"] -redshift = ["psycopg2-binary", "psycopg2cffi"] -s3 = ["s3fs", "botocore"] -snowflake = ["snowflake-connector-python"] -synapse = ["pyodbc", "adlfs", "pyarrow"] -weaviate = ["weaviate-client"] - -[metadata] -lock-version = "1.1" -python-versions = ">=3.8.1,<3.13" -content-hash = "a6b9198e884e19f1c76b3fc6fcdc093df89c6c8809a408be6144b1cfec4cf164" - -[metadata.files] -about-time = [ - {file = "about-time-4.2.1.tar.gz", hash = "sha256:6a538862d33ce67d997429d14998310e1dbfda6cb7d9bbfbf799c4709847fece"}, - {file = "about_time-4.2.1-py3-none-any.whl", hash = "sha256:8bbf4c75fe13cbd3d72f49a03b02c5c7dca32169b6d49117c257e7eb3eaee341"}, -] -adlfs = [ - {file = "adlfs-2023.8.0-py3-none-any.whl", hash = "sha256:3eb248a3c2a30b419f1147bd7676d156b5219f96ef7f11d47166afd2a3bdb07e"}, - {file = "adlfs-2023.8.0.tar.gz", hash = "sha256:07e804f6df4593acfcaf01025b162e30ac13e523d3570279c98b2d91a18026d9"}, -] -agate = [ - {file = "agate-1.7.1-py2.py3-none-any.whl", hash = "sha256:23f9f412f74f97b72f82b1525ab235cc816bc8c8525d968a091576a0dbc54a5f"}, - {file = "agate-1.7.1.tar.gz", hash = "sha256:eadf46d980168b8922d5d396d6258eecd5e7dbef7e6f0c0b71e968545ea96389"}, -] -aiobotocore = [ - {file = "aiobotocore-2.11.2-py3-none-any.whl", hash = "sha256:487fede588040bfa3a43df945275c28c1c73ca75bf705295adb9fbadd2e89be7"}, - {file = "aiobotocore-2.11.2.tar.gz", hash = "sha256:6dd7352248e3523019c5a54a395d2b1c31080697fc80a9ad2672de4eec8c7abd"}, -] -aiohttp = [ - {file = "aiohttp-3.8.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a94159871304770da4dd371f4291b20cac04e8c94f11bdea1c3478e557fbe0d8"}, - {file = "aiohttp-3.8.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:13bf85afc99ce6f9ee3567b04501f18f9f8dbbb2ea11ed1a2e079670403a7c84"}, - {file = "aiohttp-3.8.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2ce2ac5708501afc4847221a521f7e4b245abf5178cf5ddae9d5b3856ddb2f3a"}, - {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96943e5dcc37a6529d18766597c491798b7eb7a61d48878611298afc1fca946c"}, - {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ad5c3c4590bb3cc28b4382f031f3783f25ec223557124c68754a2231d989e2b"}, - {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0c413c633d0512df4dc7fd2373ec06cc6a815b7b6d6c2f208ada7e9e93a5061d"}, - {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df72ac063b97837a80d80dec8d54c241af059cc9bb42c4de68bd5b61ceb37caa"}, - {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c48c5c0271149cfe467c0ff8eb941279fd6e3f65c9a388c984e0e6cf57538e14"}, - {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:368a42363c4d70ab52c2c6420a57f190ed3dfaca6a1b19afda8165ee16416a82"}, - {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7607ec3ce4993464368505888af5beb446845a014bc676d349efec0e05085905"}, - {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0d21c684808288a98914e5aaf2a7c6a3179d4df11d249799c32d1808e79503b5"}, - {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:312fcfbacc7880a8da0ae8b6abc6cc7d752e9caa0051a53d217a650b25e9a691"}, - {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ad093e823df03bb3fd37e7dec9d4670c34f9e24aeace76808fc20a507cace825"}, - {file = "aiohttp-3.8.5-cp310-cp310-win32.whl", hash = "sha256:33279701c04351a2914e1100b62b2a7fdb9a25995c4a104259f9a5ead7ed4802"}, - {file = "aiohttp-3.8.5-cp310-cp310-win_amd64.whl", hash = "sha256:6e4a280e4b975a2e7745573e3fc9c9ba0d1194a3738ce1cbaa80626cc9b4f4df"}, - {file = "aiohttp-3.8.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ae871a964e1987a943d83d6709d20ec6103ca1eaf52f7e0d36ee1b5bebb8b9b9"}, - {file = "aiohttp-3.8.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:461908b2578955045efde733719d62f2b649c404189a09a632d245b445c9c975"}, - {file = "aiohttp-3.8.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:72a860c215e26192379f57cae5ab12b168b75db8271f111019509a1196dfc780"}, - {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc14be025665dba6202b6a71cfcdb53210cc498e50068bc088076624471f8bb9"}, - {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8af740fc2711ad85f1a5c034a435782fbd5b5f8314c9a3ef071424a8158d7f6b"}, - {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:841cd8233cbd2111a0ef0a522ce016357c5e3aff8a8ce92bcfa14cef890d698f"}, - {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ed1c46fb119f1b59304b5ec89f834f07124cd23ae5b74288e364477641060ff"}, - {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84f8ae3e09a34f35c18fa57f015cc394bd1389bce02503fb30c394d04ee6b938"}, - {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:62360cb771707cb70a6fd114b9871d20d7dd2163a0feafe43fd115cfe4fe845e"}, - {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:23fb25a9f0a1ca1f24c0a371523546366bb642397c94ab45ad3aedf2941cec6a"}, - {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:b0ba0d15164eae3d878260d4c4df859bbdc6466e9e6689c344a13334f988bb53"}, - {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5d20003b635fc6ae3f96d7260281dfaf1894fc3aa24d1888a9b2628e97c241e5"}, - {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0175d745d9e85c40dcc51c8f88c74bfbaef9e7afeeeb9d03c37977270303064c"}, - {file = "aiohttp-3.8.5-cp311-cp311-win32.whl", hash = "sha256:2e1b1e51b0774408f091d268648e3d57f7260c1682e7d3a63cb00d22d71bb945"}, - {file = "aiohttp-3.8.5-cp311-cp311-win_amd64.whl", hash = "sha256:043d2299f6dfdc92f0ac5e995dfc56668e1587cea7f9aa9d8a78a1b6554e5755"}, - {file = "aiohttp-3.8.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:cae533195e8122584ec87531d6df000ad07737eaa3c81209e85c928854d2195c"}, - {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f21e83f355643c345177a5d1d8079f9f28b5133bcd154193b799d380331d5d3"}, - {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a7a75ef35f2df54ad55dbf4b73fe1da96f370e51b10c91f08b19603c64004acc"}, - {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2e2e9839e14dd5308ee773c97115f1e0a1cb1d75cbeeee9f33824fa5144c7634"}, - {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44e65da1de4403d0576473e2344828ef9c4c6244d65cf4b75549bb46d40b8dd"}, - {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:78d847e4cde6ecc19125ccbc9bfac4a7ab37c234dd88fbb3c5c524e8e14da543"}, - {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:c7a815258e5895d8900aec4454f38dca9aed71085f227537208057853f9d13f2"}, - {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:8b929b9bd7cd7c3939f8bcfffa92fae7480bd1aa425279d51a89327d600c704d"}, - {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:5db3a5b833764280ed7618393832e0853e40f3d3e9aa128ac0ba0f8278d08649"}, - {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:a0215ce6041d501f3155dc219712bc41252d0ab76474615b9700d63d4d9292af"}, - {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:fd1ed388ea7fbed22c4968dd64bab0198de60750a25fe8c0c9d4bef5abe13824"}, - {file = "aiohttp-3.8.5-cp36-cp36m-win32.whl", hash = "sha256:6e6783bcc45f397fdebc118d772103d751b54cddf5b60fbcc958382d7dd64f3e"}, - {file = "aiohttp-3.8.5-cp36-cp36m-win_amd64.whl", hash = "sha256:b5411d82cddd212644cf9360879eb5080f0d5f7d809d03262c50dad02f01421a"}, - {file = "aiohttp-3.8.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:01d4c0c874aa4ddfb8098e85d10b5e875a70adc63db91f1ae65a4b04d3344cda"}, - {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5980a746d547a6ba173fd5ee85ce9077e72d118758db05d229044b469d9029a"}, - {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2a482e6da906d5e6e653be079b29bc173a48e381600161c9932d89dfae5942ef"}, - {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:80bd372b8d0715c66c974cf57fe363621a02f359f1ec81cba97366948c7fc873"}, - {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1161b345c0a444ebcf46bf0a740ba5dcf50612fd3d0528883fdc0eff578006a"}, - {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd56db019015b6acfaaf92e1ac40eb8434847d9bf88b4be4efe5bfd260aee692"}, - {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:153c2549f6c004d2754cc60603d4668899c9895b8a89397444a9c4efa282aaf4"}, - {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4a01951fabc4ce26ab791da5f3f24dca6d9a6f24121746eb19756416ff2d881b"}, - {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bfb9162dcf01f615462b995a516ba03e769de0789de1cadc0f916265c257e5d8"}, - {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:7dde0009408969a43b04c16cbbe252c4f5ef4574ac226bc8815cd7342d2028b6"}, - {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4149d34c32f9638f38f544b3977a4c24052042affa895352d3636fa8bffd030a"}, - {file = "aiohttp-3.8.5-cp37-cp37m-win32.whl", hash = "sha256:68c5a82c8779bdfc6367c967a4a1b2aa52cd3595388bf5961a62158ee8a59e22"}, - {file = "aiohttp-3.8.5-cp37-cp37m-win_amd64.whl", hash = "sha256:2cf57fb50be5f52bda004b8893e63b48530ed9f0d6c96c84620dc92fe3cd9b9d"}, - {file = "aiohttp-3.8.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:eca4bf3734c541dc4f374ad6010a68ff6c6748f00451707f39857f429ca36ced"}, - {file = "aiohttp-3.8.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1274477e4c71ce8cfe6c1ec2f806d57c015ebf84d83373676036e256bc55d690"}, - {file = "aiohttp-3.8.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:28c543e54710d6158fc6f439296c7865b29e0b616629767e685a7185fab4a6b9"}, - {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:910bec0c49637d213f5d9877105d26e0c4a4de2f8b1b29405ff37e9fc0ad52b8"}, - {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5443910d662db951b2e58eb70b0fbe6b6e2ae613477129a5805d0b66c54b6cb7"}, - {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2e460be6978fc24e3df83193dc0cc4de46c9909ed92dd47d349a452ef49325b7"}, - {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb1558def481d84f03b45888473fc5a1f35747b5f334ef4e7a571bc0dfcb11f8"}, - {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:34dd0c107799dcbbf7d48b53be761a013c0adf5571bf50c4ecad5643fe9cfcd0"}, - {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aa1990247f02a54185dc0dff92a6904521172a22664c863a03ff64c42f9b5410"}, - {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0e584a10f204a617d71d359fe383406305a4b595b333721fa50b867b4a0a1548"}, - {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:a3cf433f127efa43fee6b90ea4c6edf6c4a17109d1d037d1a52abec84d8f2e42"}, - {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:c11f5b099adafb18e65c2c997d57108b5bbeaa9eeee64a84302c0978b1ec948b"}, - {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:84de26ddf621d7ac4c975dbea4c945860e08cccde492269db4e1538a6a6f3c35"}, - {file = "aiohttp-3.8.5-cp38-cp38-win32.whl", hash = 
"sha256:ab88bafedc57dd0aab55fa728ea10c1911f7e4d8b43e1d838a1739f33712921c"}, - {file = "aiohttp-3.8.5-cp38-cp38-win_amd64.whl", hash = "sha256:5798a9aad1879f626589f3df0f8b79b3608a92e9beab10e5fda02c8a2c60db2e"}, - {file = "aiohttp-3.8.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a6ce61195c6a19c785df04e71a4537e29eaa2c50fe745b732aa937c0c77169f3"}, - {file = "aiohttp-3.8.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:773dd01706d4db536335fcfae6ea2440a70ceb03dd3e7378f3e815b03c97ab51"}, - {file = "aiohttp-3.8.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f83a552443a526ea38d064588613aca983d0ee0038801bc93c0c916428310c28"}, - {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f7372f7341fcc16f57b2caded43e81ddd18df53320b6f9f042acad41f8e049a"}, - {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea353162f249c8097ea63c2169dd1aa55de1e8fecbe63412a9bc50816e87b761"}, - {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5d47ae48db0b2dcf70bc8a3bc72b3de86e2a590fc299fdbbb15af320d2659de"}, - {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d827176898a2b0b09694fbd1088c7a31836d1a505c243811c87ae53a3f6273c1"}, - {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3562b06567c06439d8b447037bb655ef69786c590b1de86c7ab81efe1c9c15d8"}, - {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4e874cbf8caf8959d2adf572a78bba17cb0e9d7e51bb83d86a3697b686a0ab4d"}, - {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6809a00deaf3810e38c628e9a33271892f815b853605a936e2e9e5129762356c"}, - {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:33776e945d89b29251b33a7e7d006ce86447b2cfd66db5e5ded4e5cd0340585c"}, - {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:eaeed7abfb5d64c539e2db173f63631455f1196c37d9d8d873fc316470dfbacd"}, - {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e91d635961bec2d8f19dfeb41a539eb94bd073f075ca6dae6c8dc0ee89ad6f91"}, - {file = "aiohttp-3.8.5-cp39-cp39-win32.whl", hash = "sha256:00ad4b6f185ec67f3e6562e8a1d2b69660be43070bd0ef6fcec5211154c7df67"}, - {file = "aiohttp-3.8.5-cp39-cp39-win_amd64.whl", hash = "sha256:c0a9034379a37ae42dea7ac1e048352d96286626251862e448933c0f59cbd79c"}, - {file = "aiohttp-3.8.5.tar.gz", hash = "sha256:b9552ec52cc147dbf1944ac7ac98af7602e51ea2dcd076ed194ca3c0d1c7d0bc"}, -] -aioitertools = [ - {file = "aioitertools-0.11.0-py3-none-any.whl", hash = "sha256:04b95e3dab25b449def24d7df809411c10e62aab0cbe31a50ca4e68748c43394"}, - {file = "aioitertools-0.11.0.tar.gz", hash = "sha256:42c68b8dd3a69c2bf7f2233bf7df4bb58b557bca5252ac02ed5187bbc67d6831"}, -] -aiosignal = [ - {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, - {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, -] -alembic = [ - {file = "alembic-1.12.0-py3-none-any.whl", hash = "sha256:03226222f1cf943deee6c85d9464261a6c710cd19b4fe867a3ad1f25afda610f"}, - {file = "alembic-1.12.0.tar.gz", hash = "sha256:8e7645c32e4f200675e69f0745415335eb59a3663f5feb487abfa0b30c45888b"}, -] -alive-progress = [ - {file = "alive-progress-3.1.4.tar.gz", hash = 
"sha256:74a95d8d0d42bc99d3a3725dbd06ebb852245f1b64e301a7c375b92b22663f7b"}, - {file = "alive_progress-3.1.4-py3-none-any.whl", hash = "sha256:c80ad87ce9c1054b01135a87fae69ecebbfc2107497ae87cbe6aec7e534903db"}, -] -annotated-types = [ - {file = "annotated_types-0.6.0-py3-none-any.whl", hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43"}, - {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"}, -] -ansicon = [ - {file = "ansicon-1.89.0-py2.py3-none-any.whl", hash = "sha256:f1def52d17f65c2c9682cf8370c03f541f410c1752d6a14029f97318e4b9dfec"}, - {file = "ansicon-1.89.0.tar.gz", hash = "sha256:e4d039def5768a47e4afec8e89e83ec3ae5a26bf00ad851f914d1240b444d2b1"}, -] -anyio = [ - {file = "anyio-4.0.0-py3-none-any.whl", hash = "sha256:cfdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f"}, - {file = "anyio-4.0.0.tar.gz", hash = "sha256:f7ed51751b2c2add651e5747c891b47e26d2a21be5d32d9311dfe9692f3e5d7a"}, -] -apache-airflow = [ - {file = "apache_airflow-2.8.1-py3-none-any.whl", hash = "sha256:8178b3fd22a8766beb2e2972352f37402994a2ea4356106a6763e05807efaa88"}, - {file = "apache_airflow-2.8.1.tar.gz", hash = "sha256:7443d82b790886c5ec137a8fdb94d672e33e81336713ca7320b4a1bbad443a9c"}, -] -apache-airflow-providers-common-io = [ - {file = "apache_airflow_providers_common_io-1.3.0-py3-none-any.whl", hash = "sha256:a67c6dd3cb419c68fc1a9ed62f0f434426852e15a46c3159f367b3961332955d"}, - {file = "apache_airflow_providers_common_io-1.3.0.tar.gz", hash = "sha256:7172620a2370031970df2212a9f694a5ff82240f7e498b8b7dfdbae7e6c882d6"}, -] -apache-airflow-providers-common-sql = [ - {file = "apache-airflow-providers-common-sql-1.7.1.tar.gz", hash = "sha256:ba37f795d9656a87cf4661edc381b8ecfe930272c59324b59f8a158fd0971aeb"}, - {file = "apache_airflow_providers_common_sql-1.7.1-py3-none-any.whl", hash = "sha256:36da2f51b51a64765b0ed5e6a5fece8eaa3ca173dfbff803e2fe2a0afbb90944"}, -] -apache-airflow-providers-ftp = [ - {file = "apache-airflow-providers-ftp-3.5.1.tar.gz", hash = "sha256:dc6dc524dc7454857a0812154d7540172e36db3a87e48a4a91918ebf80898bbf"}, - {file = "apache_airflow_providers_ftp-3.5.1-py3-none-any.whl", hash = "sha256:e4ea77d6276355acfe2392c12155db7b9d51be460b7673b616dc1d8bee03c1d7"}, -] -apache-airflow-providers-http = [ - {file = "apache-airflow-providers-http-4.5.1.tar.gz", hash = "sha256:ec90920ff980fc264af9811dc72c37ef272bcdb3d007c7114e12366559426460"}, - {file = "apache_airflow_providers_http-4.5.1-py3-none-any.whl", hash = "sha256:702f26938bc22684eefecd297c2b0809793f9e43b8d911d807a29f21e69da179"}, -] -apache-airflow-providers-imap = [ - {file = "apache-airflow-providers-imap-3.3.1.tar.gz", hash = "sha256:40bac2a75e4dfbcd7d397776d90d03938facaf2707acc6cc119a8db684e53f77"}, - {file = "apache_airflow_providers_imap-3.3.1-py3-none-any.whl", hash = "sha256:adb6ef7864a5a8e245fbbd555bb4ef1eecf5b094d6d23ca0edc5f0aded50490d"}, -] -apache-airflow-providers-sqlite = [ - {file = "apache-airflow-providers-sqlite-3.4.3.tar.gz", hash = "sha256:347d2db03eaa5ea9fef414666565ffa5e849935cbc30e37237edcaa822b5ced8"}, - {file = "apache_airflow_providers_sqlite-3.4.3-py3-none-any.whl", hash = "sha256:4ffa6a50f0ea1b4e51240b657dfec3fb026c87bdfa71af908a56461df6a6f2e0"}, -] -apispec = [ - {file = "apispec-6.3.0-py3-none-any.whl", hash = "sha256:95a0b9355785df998bb0e9b939237a30ee4c7428fd6ef97305eae3da06b9b339"}, - {file = "apispec-6.3.0.tar.gz", hash = "sha256:6cb08d92ce73ff0b3bf46cb2ea5c00d57289b0f279fb0256a3df468182ba5344"}, -] 
-appdirs = [ - {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, - {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, -] -argcomplete = [ - {file = "argcomplete-3.1.1-py3-none-any.whl", hash = "sha256:35fa893a88deea85ea7b20d241100e64516d6af6d7b0ae2bed1d263d26f70948"}, - {file = "argcomplete-3.1.1.tar.gz", hash = "sha256:6c4c563f14f01440aaffa3eae13441c5db2357b5eec639abe7c0b15334627dff"}, -] -asgiref = [ - {file = "asgiref-3.7.2-py3-none-any.whl", hash = "sha256:89b2ef2247e3b562a16eef663bc0e2e703ec6468e2fa8a5cd61cd449786d4f6e"}, - {file = "asgiref-3.7.2.tar.gz", hash = "sha256:9e0ce3aa93a819ba5b45120216b23878cf6e8525eb3848653452b4192b92afed"}, -] -asn1crypto = [ - {file = "asn1crypto-1.5.1-py2.py3-none-any.whl", hash = "sha256:db4e40728b728508912cbb3d44f19ce188f218e9eba635821bb4b68564f8fd67"}, - {file = "asn1crypto-1.5.1.tar.gz", hash = "sha256:13ae38502be632115abf8a24cbe5f4da52e3b5231990aff31123c805306ccb9c"}, -] -astatine = [ - {file = "astatine-0.3.3-py3-none-any.whl", hash = "sha256:6d8c914f01fbea252cb8f31563f2e766a9ab03c02b9bcc37d18f7d9138828401"}, - {file = "astatine-0.3.3.tar.gz", hash = "sha256:0c58a7844b5890ff16da07dbfeb187341d8324cb4378940f89d795cbebebce08"}, -] -asttokens = [ - {file = "asttokens-2.3.0-py2.py3-none-any.whl", hash = "sha256:bef1a51bc256d349e9f94e7e40e44b705ed1162f55294220dd561d24583d9877"}, - {file = "asttokens-2.3.0.tar.gz", hash = "sha256:2552a88626aaa7f0f299f871479fc755bd4e7c11e89078965e928fb7bb9a6afe"}, -] -astunparse = [ - {file = "astunparse-1.6.3-py2.py3-none-any.whl", hash = "sha256:c2652417f2c8b5bb325c885ae329bdf3f86424075c4fd1a128674bc6fba4b8e8"}, - {file = "astunparse-1.6.3.tar.gz", hash = "sha256:5ad93a8456f0d084c3456d059fd9a92cce667963232cbf763eac3bc5b7940872"}, -] -async-timeout = [ - {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, - {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, -] -attrs = [ - {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, - {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, -] -authlib = [ - {file = "Authlib-1.2.1-py2.py3-none-any.whl", hash = "sha256:c88984ea00149a90e3537c964327da930779afa4564e354edfd98410bea01911"}, - {file = "Authlib-1.2.1.tar.gz", hash = "sha256:421f7c6b468d907ca2d9afede256f068f87e34d23dd221c07d13d4c234726afb"}, -] -azure-core = [ - {file = "azure-core-1.29.3.tar.gz", hash = "sha256:c92700af982e71c8c73de9f4c20da8b3f03ce2c22d13066e4d416b4629c87903"}, - {file = "azure_core-1.29.3-py3-none-any.whl", hash = "sha256:f8b2910f92b66293d93bd00564924ad20ad48f4a1e150577cf18d1e7d4f9263c"}, -] -azure-datalake-store = [ - {file = "azure-datalake-store-0.0.53.tar.gz", hash = "sha256:05b6de62ee3f2a0a6e6941e6933b792b800c3e7f6ffce2fc324bc19875757393"}, - {file = "azure_datalake_store-0.0.53-py2.py3-none-any.whl", hash = "sha256:a30c902a6e360aa47d7f69f086b426729784e71c536f330b691647a51dc42b2b"}, -] -azure-identity = [ - {file = "azure-identity-1.14.0.zip", hash = "sha256:72441799f8c5c89bfe21026965e266672a7c5d050c2c65119ef899dd5362e2b1"}, - {file = "azure_identity-1.14.0-py3-none-any.whl", hash = "sha256:edabf0e010eb85760e1dd19424d5e8f97ba2c9caff73a16e7b30ccbdbcce369b"}, -] -azure-storage-blob = [ - 
{file = "azure-storage-blob-12.17.0.zip", hash = "sha256:c14b785a17050b30fc326a315bdae6bc4a078855f4f94a4c303ad74a48dc8c63"}, - {file = "azure_storage_blob-12.17.0-py3-none-any.whl", hash = "sha256:0016e0c549a80282d7b4920c03f2f4ba35c53e6e3c7dbcd2a4a8c8eb3882c1e7"}, -] -babel = [ - {file = "Babel-2.12.1-py3-none-any.whl", hash = "sha256:b4246fb7677d3b98f501a39d43396d3cafdc8eadb045f4a31be01863f655c610"}, - {file = "Babel-2.12.1.tar.gz", hash = "sha256:cc2d99999cd01d44420ae725a21c9e3711b3aadc7976d6147f622d8581963455"}, -] -backoff = [ - {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, - {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, -] -backports-zoneinfo = [ - {file = "backports.zoneinfo-0.2.1-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:da6013fd84a690242c310d77ddb8441a559e9cb3d3d59ebac9aca1a57b2e18bc"}, - {file = "backports.zoneinfo-0.2.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:89a48c0d158a3cc3f654da4c2de1ceba85263fafb861b98b59040a5086259722"}, - {file = "backports.zoneinfo-0.2.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:1c5742112073a563c81f786e77514969acb58649bcdf6cdf0b4ed31a348d4546"}, - {file = "backports.zoneinfo-0.2.1-cp36-cp36m-win32.whl", hash = "sha256:e8236383a20872c0cdf5a62b554b27538db7fa1bbec52429d8d106effbaeca08"}, - {file = "backports.zoneinfo-0.2.1-cp36-cp36m-win_amd64.whl", hash = "sha256:8439c030a11780786a2002261569bdf362264f605dfa4d65090b64b05c9f79a7"}, - {file = "backports.zoneinfo-0.2.1-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:f04e857b59d9d1ccc39ce2da1021d196e47234873820cbeaad210724b1ee28ac"}, - {file = "backports.zoneinfo-0.2.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:17746bd546106fa389c51dbea67c8b7c8f0d14b5526a579ca6ccf5ed72c526cf"}, - {file = "backports.zoneinfo-0.2.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5c144945a7752ca544b4b78c8c41544cdfaf9786f25fe5ffb10e838e19a27570"}, - {file = "backports.zoneinfo-0.2.1-cp37-cp37m-win32.whl", hash = "sha256:e55b384612d93be96506932a786bbcde5a2db7a9e6a4bb4bffe8b733f5b9036b"}, - {file = "backports.zoneinfo-0.2.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a76b38c52400b762e48131494ba26be363491ac4f9a04c1b7e92483d169f6582"}, - {file = "backports.zoneinfo-0.2.1-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:8961c0f32cd0336fb8e8ead11a1f8cd99ec07145ec2931122faaac1c8f7fd987"}, - {file = "backports.zoneinfo-0.2.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:e81b76cace8eda1fca50e345242ba977f9be6ae3945af8d46326d776b4cf78d1"}, - {file = "backports.zoneinfo-0.2.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7b0a64cda4145548fed9efc10322770f929b944ce5cee6c0dfe0c87bf4c0c8c9"}, - {file = "backports.zoneinfo-0.2.1-cp38-cp38-win32.whl", hash = "sha256:1b13e654a55cd45672cb54ed12148cd33628f672548f373963b0bff67b217328"}, - {file = "backports.zoneinfo-0.2.1-cp38-cp38-win_amd64.whl", hash = "sha256:4a0f800587060bf8880f954dbef70de6c11bbe59c673c3d818921f042f9954a6"}, - {file = "backports.zoneinfo-0.2.1.tar.gz", hash = "sha256:fadbfe37f74051d024037f223b8e001611eac868b5c5b06144ef4d8b799862f2"}, -] -bandit = [ - {file = "bandit-1.7.5-py3-none-any.whl", hash = "sha256:75665181dc1e0096369112541a056c59d1c5f66f9bb74a8d686c3c362b83f549"}, - {file = "bandit-1.7.5.tar.gz", hash = "sha256:bdfc739baa03b880c2d15d0431b31c658ffc348e907fe197e54e0389dd59e11e"}, -] -beautifulsoup4 = [ - {file = "beautifulsoup4-4.12.2-py3-none-any.whl", hash = 
"sha256:bd2520ca0d9d7d12694a53d44ac482d181b4ec1888909b035a3dbf40d0f57d4a"}, - {file = "beautifulsoup4-4.12.2.tar.gz", hash = "sha256:492bbc69dca35d12daac71c4db1bfff0c876c00ef4a2ffacce226d4638eb72da"}, -] -black = [ - {file = "black-23.9.1-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:d6bc09188020c9ac2555a498949401ab35bb6bf76d4e0f8ee251694664df6301"}, - {file = "black-23.9.1-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:13ef033794029b85dfea8032c9d3b92b42b526f1ff4bf13b2182ce4e917f5100"}, - {file = "black-23.9.1-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:75a2dc41b183d4872d3a500d2b9c9016e67ed95738a3624f4751a0cb4818fe71"}, - {file = "black-23.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13a2e4a93bb8ca74a749b6974925c27219bb3df4d42fc45e948a5d9feb5122b7"}, - {file = "black-23.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:adc3e4442eef57f99b5590b245a328aad19c99552e0bdc7f0b04db6656debd80"}, - {file = "black-23.9.1-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:8431445bf62d2a914b541da7ab3e2b4f3bc052d2ccbf157ebad18ea126efb91f"}, - {file = "black-23.9.1-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:8fc1ddcf83f996247505db6b715294eba56ea9372e107fd54963c7553f2b6dfe"}, - {file = "black-23.9.1-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:7d30ec46de88091e4316b17ae58bbbfc12b2de05e069030f6b747dfc649ad186"}, - {file = "black-23.9.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:031e8c69f3d3b09e1aa471a926a1eeb0b9071f80b17689a655f7885ac9325a6f"}, - {file = "black-23.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:538efb451cd50f43aba394e9ec7ad55a37598faae3348d723b59ea8e91616300"}, - {file = "black-23.9.1-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:638619a559280de0c2aa4d76f504891c9860bb8fa214267358f0a20f27c12948"}, - {file = "black-23.9.1-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:a732b82747235e0542c03bf352c126052c0fbc458d8a239a94701175b17d4855"}, - {file = "black-23.9.1-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:cf3a4d00e4cdb6734b64bf23cd4341421e8953615cba6b3670453737a72ec204"}, - {file = "black-23.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf99f3de8b3273a8317681d8194ea222f10e0133a24a7548c73ce44ea1679377"}, - {file = "black-23.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:14f04c990259576acd093871e7e9b14918eb28f1866f91968ff5524293f9c573"}, - {file = "black-23.9.1-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:c619f063c2d68f19b2d7270f4cf3192cb81c9ec5bc5ba02df91471d0b88c4c5c"}, - {file = "black-23.9.1-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:6a3b50e4b93f43b34a9d3ef00d9b6728b4a722c997c99ab09102fd5efdb88325"}, - {file = "black-23.9.1-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:c46767e8df1b7beefb0899c4a95fb43058fa8500b6db144f4ff3ca38eb2f6393"}, - {file = "black-23.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50254ebfa56aa46a9fdd5d651f9637485068a1adf42270148cd101cdf56e0ad9"}, - {file = "black-23.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:403397c033adbc45c2bd41747da1f7fc7eaa44efbee256b53842470d4ac5a70f"}, - {file = "black-23.9.1-py3-none-any.whl", hash = "sha256:6ccd59584cc834b6d127628713e4b6b968e5f79572da66284532525a042549f9"}, - {file = "black-23.9.1.tar.gz", hash = "sha256:24b6b3ff5c6d9ea08a8888f6977eae858e1f340d7260cf56d70a49823236b62d"}, -] -blessed = [ - {file = "blessed-1.20.0-py2.py3-none-any.whl", hash = "sha256:0c542922586a265e699188e52d5f5ac5ec0dd517e5a1041d90d2bbf23f906058"}, - 
{file = "blessed-1.20.0.tar.gz", hash = "sha256:2cdd67f8746e048f00df47a2880f4d6acbcdb399031b604e34ba8f71d5787680"}, -] -blinker = [ - {file = "blinker-1.6.2-py3-none-any.whl", hash = "sha256:c3d739772abb7bc2860abf5f2ec284223d9ad5c76da018234f6f50d6f31ab1f0"}, - {file = "blinker-1.6.2.tar.gz", hash = "sha256:4afd3de66ef3a9f8067559fb7a1cbe555c17dcbe15971b05d1b625c3e7abe213"}, -] -boto3 = [ - {file = "boto3-1.34.34-py3-none-any.whl", hash = "sha256:33a8b6d9136fa7427160edb92d2e50f2035f04e9d63a2d1027349053e12626aa"}, - {file = "boto3-1.34.34.tar.gz", hash = "sha256:b2f321e20966f021ec800b7f2c01287a3dd04fc5965acdfbaa9c505a24ca45d1"}, -] -boto3-stubs = [ - {file = "boto3-stubs-1.28.40.tar.gz", hash = "sha256:76079a82f199087319762c931f13506e02129132e80257dab0888d3da7dc11c7"}, - {file = "boto3_stubs-1.28.40-py3-none-any.whl", hash = "sha256:bd1d1cbdcbf18902a090d4a746cdecef2a7ebe31cf9a474bbe407d57eaa79a6a"}, -] -botocore = [ - {file = "botocore-1.34.34-py3-none-any.whl", hash = "sha256:cd060b0d88ebb2b893f1411c1db7f2ba66cc18e52dcc57ad029564ef5fec437b"}, - {file = "botocore-1.34.34.tar.gz", hash = "sha256:54093dc97372bb7683f5c61a279aa8240408abf3b2cc494ae82a9a90c1b784b5"}, -] -botocore-stubs = [ - {file = "botocore_stubs-1.31.40-py3-none-any.whl", hash = "sha256:aab534d7e7949cd543bc9b2fadc1a36712033cb00e6f31e2475eefe8486d19ae"}, - {file = "botocore_stubs-1.31.40.tar.gz", hash = "sha256:2001a253daf4ae2e171e6137b9982a00a7fbfc7a53449a16856dc049e7cd5214"}, -] -cachelib = [ - {file = "cachelib-0.9.0-py3-none-any.whl", hash = "sha256:811ceeb1209d2fe51cd2b62810bd1eccf70feba5c52641532498be5c675493b3"}, - {file = "cachelib-0.9.0.tar.gz", hash = "sha256:38222cc7c1b79a23606de5c2607f4925779e37cdcea1c2ad21b8bae94b5425a5"}, -] -cachetools = [ - {file = "cachetools-5.3.1-py3-none-any.whl", hash = "sha256:95ef631eeaea14ba2e36f06437f36463aac3a096799e876ee55e5cdccb102590"}, - {file = "cachetools-5.3.1.tar.gz", hash = "sha256:dce83f2d9b4e1f732a8cd44af8e8fab2dbe46201467fc98b3ef8f269092bf62b"}, -] -certifi = [ - {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"}, - {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"}, -] -cffi = [ - {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, - {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = 
"sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, - {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, - {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, - {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, - {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, - {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, - {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = 
"sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, - {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, - {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, - {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, - {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, - {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, - {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, - {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, -] -chardet = [ - {file = "chardet-5.2.0-py3-none-any.whl", hash = "sha256:e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970"}, - {file = "chardet-5.2.0.tar.gz", hash = "sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7"}, -] 
-charset-normalizer = [ - {file = "charset-normalizer-3.2.0.tar.gz", hash = "sha256:3bb3d25a8e6c0aedd251753a79ae98a093c7e7b471faa3aa9a93a81431987ace"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b87549028f680ca955556e3bd57013ab47474c3124dc069faa0b6545b6c9710"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7c70087bfee18a42b4040bb9ec1ca15a08242cf5867c58726530bdf3945672ed"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a103b3a7069b62f5d4890ae1b8f0597618f628b286b03d4bc9195230b154bfa9"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94aea8eff76ee6d1cdacb07dd2123a68283cb5569e0250feab1240058f53b623"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db901e2ac34c931d73054d9797383d0f8009991e723dab15109740a63e7f902a"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0dac0ff919ba34d4df1b6131f59ce95b08b9065233446be7e459f95554c0dc8"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:193cbc708ea3aca45e7221ae58f0fd63f933753a9bfb498a3b474878f12caaad"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:baacc6aee0b2ef6f3d308e197b5d7a81c0e70b06beae1f1fcacffdbd124fe0e3"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bf420121d4c8dce6b889f0e8e4ec0ca34b7f40186203f06a946fa0276ba54029"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c04a46716adde8d927adb9457bbe39cf473e1e2c2f5d0a16ceb837e5d841ad4f"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:aaf63899c94de41fe3cf934601b0f7ccb6b428c6e4eeb80da72c58eab077b19a"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62e51710986674142526ab9f78663ca2b0726066ae26b78b22e0f5e571238dd"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-win32.whl", hash = "sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:48021783bdf96e3d6de03a6e39a1171ed5bd7e8bb93fc84cc649d11490f87cea"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4957669ef390f0e6719db3613ab3a7631e68424604a7b448f079bee145da6e09"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:46fb8c61d794b78ec7134a715a3e564aafc8f6b5e338417cb19fe9f57a5a9bf2"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f25c229a6ba38a35ae6e25ca1264621cc25d4d38dca2942a7fce0b67a4efe918"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2efb1bd13885392adfda4614c33d3b68dee4921fd0ac1d3988f8cbb7d589e72a"}, - {file = 
"charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f30b48dd7fa1474554b0b0f3fdfdd4c13b5c737a3c6284d3cdc424ec0ffff3a"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:246de67b99b6851627d945db38147d1b209a899311b1305dd84916f2b88526c6"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd9b3b31adcb054116447ea22caa61a285d92e94d710aa5ec97992ff5eb7cf3"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8c2f5e83493748286002f9369f3e6607c565a6a90425a3a1fef5ae32a36d749d"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3170c9399da12c9dc66366e9d14da8bf7147e1e9d9ea566067bbce7bb74bd9c2"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7a4826ad2bd6b07ca615c74ab91f32f6c96d08f6fcc3902ceeedaec8cdc3bcd6"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:3b1613dd5aee995ec6d4c69f00378bbd07614702a315a2cf6c1d21461fe17c23"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e608aafdb55eb9f255034709e20d5a83b6d60c054df0802fa9c9883d0a937aa"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-win32.whl", hash = "sha256:f2a1d0fd4242bd8643ce6f98927cf9c04540af6efa92323e9d3124f57727bfc1"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:681eb3d7e02e3c3655d1b16059fbfb605ac464c834a0c629048a30fad2b27489"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c57921cda3a80d0f2b8aec7e25c8aa14479ea92b5b51b6876d975d925a2ea346"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41b25eaa7d15909cf3ac4c96088c1f266a9a93ec44f87f1d13d4a0e86c81b982"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f058f6963fd82eb143c692cecdc89e075fa0828db2e5b291070485390b2f1c9c"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7647ebdfb9682b7bb97e2a5e7cb6ae735b1c25008a70b906aecca294ee96cf4"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eef9df1eefada2c09a5e7a40991b9fc6ac6ef20b1372abd48d2794a316dc0449"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e03b8895a6990c9ab2cdcd0f2fe44088ca1c65ae592b8f795c3294af00a461c3"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ee4006268ed33370957f55bf2e6f4d263eaf4dc3cfc473d1d90baff6ed36ce4a"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c4983bf937209c57240cff65906b18bb35e64ae872da6a0db937d7b4af845dd7"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:3bb7fda7260735efe66d5107fb7e6af6a7c04c7fce9b2514e04b7a74b06bf5dd"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:72814c01533f51d68702802d74f77ea026b5ec52793c791e2da806a3844a46c3"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:70c610f6cbe4b9fce272c407dd9d07e33e6bf7b4aa1b7ffb6f6ded8e634e3592"}, - {file = 
"charset_normalizer-3.2.0-cp37-cp37m-win32.whl", hash = "sha256:a401b4598e5d3f4a9a811f3daf42ee2291790c7f9d74b18d75d6e21dda98a1a1"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:c0b21078a4b56965e2b12f247467b234734491897e99c1d51cee628da9786959"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:95eb302ff792e12aba9a8b8f8474ab229a83c103d74a750ec0bd1c1eea32e669"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a100c6d595a7f316f1b6f01d20815d916e75ff98c27a01ae817439ea7726329"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6339d047dab2780cc6220f46306628e04d9750f02f983ddb37439ca47ced7149"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b749b9cc6ee664a3300bb3a273c1ca8068c46be705b6c31cf5d276f8628a94"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a38856a971c602f98472050165cea2cdc97709240373041b69030be15047691f"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89f1b185a01fe560bc8ae5f619e924407efca2191b56ce749ec84982fc59a32a"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1c8a2f4c69e08e89632defbfabec2feb8a8d99edc9f89ce33c4b9e36ab63037"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2f4ac36d8e2b4cc1aa71df3dd84ff8efbe3bfb97ac41242fbcfc053c67434f46"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a386ebe437176aab38c041de1260cd3ea459c6ce5263594399880bbc398225b2"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ccd16eb18a849fd8dcb23e23380e2f0a354e8daa0c984b8a732d9cfaba3a776d"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e6a5bf2cba5ae1bb80b154ed68a3cfa2fa00fde979a7f50d6598d3e17d9ac20c"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45de3f87179c1823e6d9e32156fb14c1927fcc9aba21433f088fdfb555b77c10"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-win32.whl", hash = "sha256:1000fba1057b92a65daec275aec30586c3de2401ccdcd41f8a5c1e2c87078706"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b2c760cfc7042b27ebdb4a43a4453bd829a5742503599144d54a032c5dc7e9e"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:855eafa5d5a2034b4621c74925d89c5efef61418570e5ef9b37717d9c796419c"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:203f0c8871d5a7987be20c72442488a0b8cfd0f43b7973771640fc593f56321f"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e857a2232ba53ae940d3456f7533ce6ca98b81917d47adc3c7fd55dad8fab858"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e86d77b090dbddbe78867a0275cb4df08ea195e660f1f7f13435a4649e954e5"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:c4fb39a81950ec280984b3a44f5bd12819953dc5fa3a7e6fa7a80db5ee853952"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dee8e57f052ef5353cf608e0b4c871aee320dd1b87d351c28764fc0ca55f9f4"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8700f06d0ce6f128de3ccdbc1acaea1ee264d2caa9ca05daaf492fde7c2a7200"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1920d4ff15ce893210c1f0c0e9d19bfbecb7983c76b33f046c13a8ffbd570252"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c1c76a1743432b4b60ab3358c937a3fe1341c828ae6194108a94c69028247f22"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f7560358a6811e52e9c4d142d497f1a6e10103d3a6881f18d04dbce3729c0e2c"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c8063cf17b19661471ecbdb3df1c84f24ad2e389e326ccaf89e3fb2484d8dd7e"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:cd6dbe0238f7743d0efe563ab46294f54f9bc8f4b9bcf57c3c666cc5bc9d1299"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1249cbbf3d3b04902ff081ffbb33ce3377fa6e4c7356f759f3cd076cc138d020"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-win32.whl", hash = "sha256:6c409c0deba34f147f77efaa67b8e4bb83d2f11c8806405f76397ae5b8c0d1c9"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:7095f6fbfaa55defb6b733cfeb14efaae7a29f0b59d8cf213be4e7ca0b857b80"}, - {file = "charset_normalizer-3.2.0-py3-none-any.whl", hash = "sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6"}, -] -click = [ - {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, - {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, -] -clickclick = [ - {file = "clickclick-20.10.2-py2.py3-none-any.whl", hash = "sha256:c8f33e6d9ec83f68416dd2136a7950125bd256ec39ccc9a85c6e280a16be2bb5"}, - {file = "clickclick-20.10.2.tar.gz", hash = "sha256:4efb13e62353e34c5eef7ed6582c4920b418d7dedc86d819e22ee089ba01802c"}, -] -colorama = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] -coloredlogs = [ - {file = "coloredlogs-15.0.1-py2.py3-none-any.whl", hash = "sha256:612ee75c546f53e92e70049c9dbfcc18c935a2b9a53b66085ce9ef6a6e5c0934"}, - {file = "coloredlogs-15.0.1.tar.gz", hash = "sha256:7c991aa71a4577af2f82600d8f8f3a89f936baeaf9b50a9c197da014e5bf16b0"}, -] -colorlog = [ - {file = "colorlog-4.8.0-py2.py3-none-any.whl", hash = "sha256:3dd15cb27e8119a24c1a7b5c93f9f3b455855e0f73993b1c25921b2f646f1dcd"}, - {file = "colorlog-4.8.0.tar.gz", hash = "sha256:59b53160c60902c405cdec28d38356e09d40686659048893e026ecbd589516b1"}, -] -configupdater = [ - {file = "ConfigUpdater-3.1.1-py2.py3-none-any.whl", hash = "sha256:805986dbeba317886c7a8d348b2e34986dc9e3128cd3761ecc35decbd372b286"}, - {file = "ConfigUpdater-3.1.1.tar.gz", hash = "sha256:46f0c74d73efa723776764b43c9739f68052495dd3d734319c1d0eb58511f15b"}, -] -connectorx = [ - {file = "connectorx-0.3.2-cp310-cp310-macosx_10_7_x86_64.whl", 
hash = "sha256:98274242c64a2831a8b1c86e0fa2c46a557dd8cbcf00c3adcf5a602455fb02d7"}, - {file = "connectorx-0.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e2b11ba49efd330a7348bef3ce09c98218eea21d92a12dd75cd8f0ade5c99ffc"}, - {file = "connectorx-0.3.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:3f6431a30304271f9137bd7854d2850231041f95164c6b749d9ede4c0d92d10c"}, - {file = "connectorx-0.3.2-cp310-none-win_amd64.whl", hash = "sha256:b370ebe8f44d2049254dd506f17c62322cc2db1b782a57f22cce01ddcdcc8fed"}, - {file = "connectorx-0.3.2-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:d5277fc936a80da3d1dcf889020e45da3493179070d9be8a47500c7001fab967"}, - {file = "connectorx-0.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8cc6c963237c3d3b02f7dcd47e1be9fc6e8b93ef0aeed8694f65c62b3c4688a1"}, - {file = "connectorx-0.3.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:9403902685b3423cba786db01a36f36efef90ae3d429e45b74dadb4ae9e328dc"}, - {file = "connectorx-0.3.2-cp311-none-win_amd64.whl", hash = "sha256:6b5f518194a2cf12d5ad031d488ded4e4678eff3b63551856f2a6f1a83197bb8"}, - {file = "connectorx-0.3.2-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:a5602ae0531e55c58af8cfca92b8e9454fc1ccd82c801cff8ee0f17c728b4988"}, - {file = "connectorx-0.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7c5959bfb4a049bb8ce1f590b5824cd1105460b6552ffec336c4bd740eebd5bd"}, - {file = "connectorx-0.3.2-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:c4387bb27ba3acde0ab6921fdafa3811e09fce0db3d1f1ede8547d9de3aab685"}, - {file = "connectorx-0.3.2-cp38-none-win_amd64.whl", hash = "sha256:4b1920c191be9a372629c31c92d5f71fc63f49f283e5adfc4111169de40427d9"}, - {file = "connectorx-0.3.2-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:4473fc06ac3618c673cea63a7050e721fe536782d5c1b6e433589c37a63de704"}, - {file = "connectorx-0.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4009b16399457340326137a223921a24e3e166b45db4dbf3ef637b9981914dc2"}, - {file = "connectorx-0.3.2-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:74f5b93535663cf47f9fc3d7964f93e652c07003fa71c38d7a68f42167f54bba"}, - {file = "connectorx-0.3.2-cp39-none-win_amd64.whl", hash = "sha256:0b80acca13326856c14ee726b47699011ab1baa10897180240c8783423ca5e8c"}, -] -connexion = [ - {file = "connexion-2.14.1-py2.py3-none-any.whl", hash = "sha256:f343717241b4c4802a694c38fee66fb1693c897fe4ea5a957fa9b3b07caf6394"}, - {file = "connexion-2.14.1.tar.gz", hash = "sha256:99aa5781e70a7b94f8ffae8cf89f309d49cdb811bbd65a8e2f2546f3b19a01e6"}, -] -cron-descriptor = [ - {file = "cron_descriptor-1.4.0.tar.gz", hash = "sha256:b6ff4e3a988d7ca04a4ab150248e9f166fb7a5c828a85090e75bcc25aa93b4dd"}, -] -croniter = [ - {file = "croniter-1.4.1-py2.py3-none-any.whl", hash = "sha256:9595da48af37ea06ec3a9f899738f1b2c1c13da3c38cea606ef7cd03ea421128"}, - {file = "croniter-1.4.1.tar.gz", hash = "sha256:1a6df60eacec3b7a0aa52a8f2ef251ae3dd2a7c7c8b9874e73e791636d55a361"}, -] -cryptography = [ - {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:3c78451b78313fa81607fa1b3f1ae0a5ddd8014c38a02d9db0616133987b9cdf"}, - {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:928258ba5d6f8ae644e764d0f996d61a8777559f72dfeb2eea7e2fe0ad6e782d"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a1b41bc97f1ad230a41657d9155113c7521953869ae57ac39ac7f1bb471469a"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:841df4caa01008bad253bce2a6f7b47f86dc9f08df4b433c404def869f590a15"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5429ec739a29df2e29e15d082f1d9ad683701f0ec7709ca479b3ff2708dae65a"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:43f2552a2378b44869fe8827aa19e69512e3245a219104438692385b0ee119d1"}, - {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:af03b32695b24d85a75d40e1ba39ffe7db7ffcb099fe507b39fd41a565f1b157"}, - {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:49f0805fc0b2ac8d4882dd52f4a3b935b210935d500b6b805f321addc8177406"}, - {file = "cryptography-41.0.7-cp37-abi3-win32.whl", hash = "sha256:f983596065a18a2183e7f79ab3fd4c475205b839e02cbc0efbbf9666c4b3083d"}, - {file = "cryptography-41.0.7-cp37-abi3-win_amd64.whl", hash = "sha256:90452ba79b8788fa380dfb587cca692976ef4e757b194b093d845e8d99f612f2"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:079b85658ea2f59c4f43b70f8119a52414cdb7be34da5d019a77bf96d473b960"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:b640981bf64a3e978a56167594a0e97db71c89a479da8e175d8bb5be5178c003"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e3114da6d7f95d2dee7d3f4eec16dacff819740bbab931aff8648cb13c5ff5e7"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d5ec85080cce7b0513cfd233914eb8b7bbd0633f1d1703aa28d1dd5a72f678ec"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7a698cb1dac82c35fcf8fe3417a3aaba97de16a01ac914b89a0889d364d2f6be"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:37a138589b12069efb424220bf78eac59ca68b95696fc622b6ccc1c0a197204a"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:68a2dec79deebc5d26d617bfdf6e8aab065a4f34934b22d3b5010df3ba36612c"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:09616eeaef406f99046553b8a40fbf8b1e70795a91885ba4c96a70793de5504a"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48a0476626da912a44cc078f9893f292f0b3e4c739caf289268168d8f4702a39"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c7f3201ec47d5207841402594f1d7950879ef890c0c495052fa62f58283fde1a"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c5ca78485a255e03c32b513f8c2bc39fedb7f5c5f8535545bdc223a03b24f248"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d6c391c021ab1f7a82da5d8d0b3cee2f4b2c455ec86c8aebbc84837a631ff309"}, - {file = "cryptography-41.0.7.tar.gz", hash = "sha256:13f93ce9bea8016c253b34afc6bd6a75993e5c40672ed5405a9c832f0d4a00bc"}, -] -databind-core = [ - {file = "databind.core-4.4.0-py3-none-any.whl", hash = "sha256:3c8a4d9abc93e158af9931d8cec389ddfc0514e02aec03b397948d243db11881"}, - {file = "databind.core-4.4.0.tar.gz", hash = "sha256:715d485e934c073f819f0250bbfcaf59c1319f83427365bc7cfd4c347f87576d"}, -] -databind-json = [ - {file = "databind.json-4.4.0-py3-none-any.whl", hash = "sha256:df8874118cfba6fd0e77ec3d41a87e04e26034bd545230cab0db1fe904bf1b09"}, - {file = "databind.json-4.4.0.tar.gz", hash = "sha256:4356afdf0aeefcc053eda0888650c59cc558be2686f08a58324d675ccd023586"}, -] -databricks-sdk = [ - {file = 
"databricks-sdk-0.17.0.tar.gz", hash = "sha256:0a1baa6783aba9b034b9a017da8d0cf839ec61ae8318792b78bfb3db0374dd9c"}, - {file = "databricks_sdk-0.17.0-py3-none-any.whl", hash = "sha256:ad90e01c7b1a9d60a3de6a35606c79ac982e8972d3ad3ff89c251c24439c8bb9"}, -] -databricks-sql-connector = [ - {file = "databricks_sql_connector-2.9.3-py3-none-any.whl", hash = "sha256:e37b5aa8bea22e84a9920e87ad9ba6cafbe656008c180a790baa53b711dd9889"}, - {file = "databricks_sql_connector-2.9.3.tar.gz", hash = "sha256:09a1686de3470091e78640de276053d4e18f8c03ba3627ed45b368f78bf87db9"}, -] -dbt-athena-community = [ - {file = "dbt-athena-community-1.7.1.tar.gz", hash = "sha256:02c7bc461628e2adbfaf9d3f51fbe9a5cb5e06ee2ea8329259758518ceafdc12"}, - {file = "dbt_athena_community-1.7.1-py3-none-any.whl", hash = "sha256:2a376fa128e2bd98cb774fcbf718ebe4fbc9cac7857aa037b9e36bec75448361"}, -] -dbt-bigquery = [ - {file = "dbt-bigquery-1.7.2.tar.gz", hash = "sha256:27c7f492f65ab5d1d43432a4467a436fc3637e3cb72c5b4ab07ddf7573c43596"}, - {file = "dbt_bigquery-1.7.2-py3-none-any.whl", hash = "sha256:75015755363d9e8b8cebe190d59a5e08375032b37bcfec41ec8753e7dea29f6e"}, -] -dbt-core = [ - {file = "dbt-core-1.7.4.tar.gz", hash = "sha256:769b95949210cb0d1eafdb7be48b01e59984650403f86510fdee65bd0f70f76d"}, - {file = "dbt_core-1.7.4-py3-none-any.whl", hash = "sha256:50050ae44fe9bad63e1b639810ed3629822cdc7a2af0eff6e08461c94c4527c0"}, -] -dbt-databricks = [ - {file = "dbt-databricks-1.7.3.tar.gz", hash = "sha256:045e26240c825342259a59004c2e35e7773b0b6cbb255e6896bd46d3810f9607"}, - {file = "dbt_databricks-1.7.3-py3-none-any.whl", hash = "sha256:7c2b7bd7228a401d8262781749fc496c825fe6050e661e5ab3f1c66343e311cc"}, -] -dbt-duckdb = [ - {file = "dbt-duckdb-1.7.1.tar.gz", hash = "sha256:e59b3e58d7a461988d000892b75ce95245cdf899c847e3a430eb2e9e10e63bb9"}, - {file = "dbt_duckdb-1.7.1-py3-none-any.whl", hash = "sha256:bd75b1a72924b942794d0c3293a1159a01f21ab9d82c9f18b22c253dedad101a"}, -] -dbt-extractor = [ - {file = "dbt_extractor-0.5.1-cp38-abi3-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:3b91e6106b967d908b34f83929d3f50ee2b498876a1be9c055fe060ed728c556"}, - {file = "dbt_extractor-0.5.1-cp38-abi3-macosx_10_12_x86_64.whl", hash = "sha256:3614ce9f83ae4cd0dc95f77730034a793a1c090a52dcf698ba1c94050afe3a8b"}, - {file = "dbt_extractor-0.5.1-cp38-abi3-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ea4edf33035d0a060b1e01c42fb2d99316457d44c954d6ed4eed9f1948664d87"}, - {file = "dbt_extractor-0.5.1-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3b9bf50eb062b4344d9546fe42038996c6e7e7daa10724aa955d64717260e5d"}, - {file = "dbt_extractor-0.5.1-cp38-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c0ce901d4ebf0664977e4e1cbf596d4afc6c1339fcc7d2cf67ce3481566a626f"}, - {file = "dbt_extractor-0.5.1-cp38-abi3-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:cbe338b76e9ffaa18275456e041af56c21bb517f6fbda7a58308138703da0996"}, - {file = "dbt_extractor-0.5.1-cp38-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1b25fa7a276ab26aa2d70ff6e0cf4cfb1490d7831fb57ee1337c24d2b0333b84"}, - {file = "dbt_extractor-0.5.1-cp38-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c5651e458be910ff567c0da3ea2eb084fd01884cc88888ac2cf1e240dcddacc2"}, - {file = "dbt_extractor-0.5.1-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:62e4f040fd338b652683421ce48e903812e27fd6e7af58b1b70a4e1f9f2c79e3"}, - {file = 
"dbt_extractor-0.5.1-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:91e25ad78f1f4feadd27587ebbcc46ad909cfad843118908f30336d08d8400ca"}, - {file = "dbt_extractor-0.5.1-cp38-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:cdf9938b36cd098bcdd80f43dc03864da3f69f57d903a9160a32236540d4ddcd"}, - {file = "dbt_extractor-0.5.1-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:475e2c05b17eb4976eff6c8f7635be42bec33f15a74ceb87a40242c94a99cebf"}, - {file = "dbt_extractor-0.5.1-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:100453ba06e169cbdb118234ab3f06f6722a2e0e316089b81c88dea701212abc"}, - {file = "dbt_extractor-0.5.1-cp38-abi3-win32.whl", hash = "sha256:6916aae085fd5f2af069fd6947933e78b742c9e3d2165e1740c2e28ae543309a"}, - {file = "dbt_extractor-0.5.1-cp38-abi3-win_amd64.whl", hash = "sha256:eecc08f3743e802a8ede60c89f7b2bce872acc86120cbc0ae7df229bb8a95083"}, - {file = "dbt_extractor-0.5.1.tar.gz", hash = "sha256:cd5d95576a8dea4190240aaf9936a37fd74b4b7913ca69a3c368fc4472bb7e13"}, -] -dbt-postgres = [ - {file = "dbt-postgres-1.7.4.tar.gz", hash = "sha256:16185b8de36d1a2052a2e4b85512306ab55085b1ea323a353d0dc3628473208d"}, - {file = "dbt_postgres-1.7.4-py3-none-any.whl", hash = "sha256:d414b070ca5e48925ea9ab12706bbb9e2294f7d4509c28e7af42268596334044"}, -] -dbt-redshift = [ - {file = "dbt-redshift-1.7.1.tar.gz", hash = "sha256:6da69a83038d011570d131b85171842d0858a46bca3757419ae193b5724a2119"}, - {file = "dbt_redshift-1.7.1-py3-none-any.whl", hash = "sha256:2a48b9424934f5445e4285740ebe512afaa75882138121536ccc21d027ef62f2"}, -] -dbt-semantic-interfaces = [ - {file = "dbt_semantic_interfaces-0.4.3-py3-none-any.whl", hash = "sha256:af6ab8509da81ae5f5f1d5631c9761cccaed8cd5311d4824a8d4168ecd0f2093"}, - {file = "dbt_semantic_interfaces-0.4.3.tar.gz", hash = "sha256:9a46d07ad022a4c48783565a776ebc6f1d19e0412e70c4759bc9d7bba461ea1c"}, -] -dbt-snowflake = [ - {file = "dbt-snowflake-1.7.1.tar.gz", hash = "sha256:842a9e87b9e2d999e3bc27aaa369398a4d02bb3f8bb7447aa6151204d4eb90f0"}, - {file = "dbt_snowflake-1.7.1-py3-none-any.whl", hash = "sha256:32ef8733f67dcf4eb594d1b80852ef0b67e920f25bb8a2953031a3868a8d2b3e"}, -] -dbt-spark = [ - {file = "dbt-spark-1.7.1.tar.gz", hash = "sha256:a10e5d1bfdb2ca98e7ae2badd06150e2695d9d4fa18ae2354ed5bd093d77f947"}, - {file = "dbt_spark-1.7.1-py3-none-any.whl", hash = "sha256:99b5002edcdb82058a3b0ad33eb18b91a4bdde887d94855e8bd6f633d78837dc"}, -] -decopatch = [ - {file = "decopatch-1.4.10-py2.py3-none-any.whl", hash = "sha256:e151f7f93de2b1b3fd3f3272dcc7cefd1a69f68ec1c2d8e288ecd9deb36dc5f7"}, - {file = "decopatch-1.4.10.tar.gz", hash = "sha256:957f49c93f4150182c23f8fb51d13bb3213e0f17a79e09c8cca7057598b55720"}, -] -decorator = [ - {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, - {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, -] -deprecated = [ - {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, - {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, -] -diff-cover = [ - {file = "diff_cover-7.7.0-py3-none-any.whl", hash = "sha256:bf86f32ec999f9a9e79bf24969f7127ea7b4e55c3ef3cd9300feb13188c89736"}, - {file = "diff_cover-7.7.0.tar.gz", hash = "sha256:60614cf7e722cf7fb1bde497afac0b514294e1e26534449622dac4da296123fb"}, -] -dill = [ - {file = "dill-0.3.7-py3-none-any.whl", hash = 
"sha256:76b122c08ef4ce2eedcd4d1abd8e641114bfc6c2867f49f3c41facf65bf19f5e"}, - {file = "dill-0.3.7.tar.gz", hash = "sha256:cc1c8b182eb3013e24bd475ff2e9295af86c1a38eb1aff128dac8962a9ce3c03"}, -] -dnspython = [ - {file = "dnspython-2.4.2-py3-none-any.whl", hash = "sha256:57c6fbaaeaaf39c891292012060beb141791735dbb4004798328fc2c467402d8"}, - {file = "dnspython-2.4.2.tar.gz", hash = "sha256:8dcfae8c7460a2f84b4072e26f1c9f4101ca20c071649cb7c34e8b6a93d58984"}, -] -docspec = [ - {file = "docspec-2.2.1-py3-none-any.whl", hash = "sha256:7538f750095a9688c6980ff9a4e029a823a500f64bd00b6b4bdb27951feb31cb"}, - {file = "docspec-2.2.1.tar.gz", hash = "sha256:4854e77edc0e2de40e785e57e95880f7095a05fe978f8b54cef7a269586e15ff"}, -] -docspec-python = [ - {file = "docspec_python-2.2.1-py3-none-any.whl", hash = "sha256:76ac41d35a8face35b2d766c2e8a416fb8832359785d396f0d53bcb00f178e54"}, - {file = "docspec_python-2.2.1.tar.gz", hash = "sha256:c41b850b4d6f4de30999ea6f82c9cdb9183d9bcba45559ee9173d3dab7281559"}, -] -docstring-parser = [ - {file = "docstring_parser-0.11.tar.gz", hash = "sha256:93b3f8f481c7d24e37c5d9f30293c89e2933fa209421c8abd731dd3ef0715ecb"}, -] -docutils = [ - {file = "docutils-0.20.1-py3-none-any.whl", hash = "sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6"}, - {file = "docutils-0.20.1.tar.gz", hash = "sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b"}, -] -domdf-python-tools = [ - {file = "domdf_python_tools-3.6.1-py3-none-any.whl", hash = "sha256:e18158460850957f18e740eb94ede56f580ddb0cb162ab9d9834ed8bbb1b6431"}, - {file = "domdf_python_tools-3.6.1.tar.gz", hash = "sha256:acc04563d23bce4d437dd08af6b9bea788328c412772a044d8ca428a7ad861be"}, -] -duckdb = [ - {file = "duckdb-0.9.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:aadcea5160c586704c03a8a796c06a8afffbefefb1986601104a60cb0bfdb5ab"}, - {file = "duckdb-0.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:08215f17147ed83cbec972175d9882387366de2ed36c21cbe4add04b39a5bcb4"}, - {file = "duckdb-0.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ee6c2a8aba6850abef5e1be9dbc04b8e72a5b2c2b67f77892317a21fae868fe7"}, - {file = "duckdb-0.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ff49f3da9399900fd58b5acd0bb8bfad22c5147584ad2427a78d937e11ec9d0"}, - {file = "duckdb-0.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd5ac5baf8597efd2bfa75f984654afcabcd698342d59b0e265a0bc6f267b3f0"}, - {file = "duckdb-0.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:81c6df905589a1023a27e9712edb5b724566587ef280a0c66a7ec07c8083623b"}, - {file = "duckdb-0.9.2-cp310-cp310-win32.whl", hash = "sha256:a298cd1d821c81d0dec8a60878c4b38c1adea04a9675fb6306c8f9083bbf314d"}, - {file = "duckdb-0.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:492a69cd60b6cb4f671b51893884cdc5efc4c3b2eb76057a007d2a2295427173"}, - {file = "duckdb-0.9.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:061a9ea809811d6e3025c5de31bc40e0302cfb08c08feefa574a6491e882e7e8"}, - {file = "duckdb-0.9.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a43f93be768af39f604b7b9b48891f9177c9282a408051209101ff80f7450d8f"}, - {file = "duckdb-0.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ac29c8c8f56fff5a681f7bf61711ccb9325c5329e64f23cb7ff31781d7b50773"}, - {file = "duckdb-0.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b14d98d26bab139114f62ade81350a5342f60a168d94b27ed2c706838f949eda"}, - {file = 
"duckdb-0.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:796a995299878913e765b28cc2b14c8e44fae2f54ab41a9ee668c18449f5f833"}, - {file = "duckdb-0.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6cb64ccfb72c11ec9c41b3cb6181b6fd33deccceda530e94e1c362af5f810ba1"}, - {file = "duckdb-0.9.2-cp311-cp311-win32.whl", hash = "sha256:930740cb7b2cd9e79946e1d3a8f66e15dc5849d4eaeff75c8788d0983b9256a5"}, - {file = "duckdb-0.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:c28f13c45006fd525001b2011cdf91fa216530e9751779651e66edc0e446be50"}, - {file = "duckdb-0.9.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fbce7bbcb4ba7d99fcec84cec08db40bc0dd9342c6c11930ce708817741faeeb"}, - {file = "duckdb-0.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15a82109a9e69b1891f0999749f9e3265f550032470f51432f944a37cfdc908b"}, - {file = "duckdb-0.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9490fb9a35eb74af40db5569d90df8a04a6f09ed9a8c9caa024998c40e2506aa"}, - {file = "duckdb-0.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:696d5c6dee86c1a491ea15b74aafe34ad2b62dcd46ad7e03b1d00111ca1a8c68"}, - {file = "duckdb-0.9.2-cp37-cp37m-win32.whl", hash = "sha256:4f0935300bdf8b7631ddfc838f36a858c1323696d8c8a2cecbd416bddf6b0631"}, - {file = "duckdb-0.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:0aab900f7510e4d2613263865570203ddfa2631858c7eb8cbed091af6ceb597f"}, - {file = "duckdb-0.9.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:7d8130ed6a0c9421b135d0743705ea95b9a745852977717504e45722c112bf7a"}, - {file = "duckdb-0.9.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:974e5de0294f88a1a837378f1f83330395801e9246f4e88ed3bfc8ada65dcbee"}, - {file = "duckdb-0.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4fbc297b602ef17e579bb3190c94d19c5002422b55814421a0fc11299c0c1100"}, - {file = "duckdb-0.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1dd58a0d84a424924a35b3772419f8cd78a01c626be3147e4934d7a035a8ad68"}, - {file = "duckdb-0.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11a1194a582c80dfb57565daa06141727e415ff5d17e022dc5f31888a5423d33"}, - {file = "duckdb-0.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:be45d08541002a9338e568dca67ab4f20c0277f8f58a73dfc1435c5b4297c996"}, - {file = "duckdb-0.9.2-cp38-cp38-win32.whl", hash = "sha256:dd6f88aeb7fc0bfecaca633629ff5c986ac966fe3b7dcec0b2c48632fd550ba2"}, - {file = "duckdb-0.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:28100c4a6a04e69aa0f4a6670a6d3d67a65f0337246a0c1a429f3f28f3c40b9a"}, - {file = "duckdb-0.9.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7ae5bf0b6ad4278e46e933e51473b86b4b932dbc54ff097610e5b482dd125552"}, - {file = "duckdb-0.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e5d0bb845a80aa48ed1fd1d2d285dd352e96dc97f8efced2a7429437ccd1fe1f"}, - {file = "duckdb-0.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ce262d74a52500d10888110dfd6715989926ec936918c232dcbaddb78fc55b4"}, - {file = "duckdb-0.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6935240da090a7f7d2666f6d0a5e45ff85715244171ca4e6576060a7f4a1200e"}, - {file = "duckdb-0.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5cfb93e73911696a98b9479299d19cfbc21dd05bb7ab11a923a903f86b4d06e"}, - {file = "duckdb-0.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:64e3bc01751f31e7572d2716c3e8da8fe785f1cdc5be329100818d223002213f"}, - {file = 
"duckdb-0.9.2-cp39-cp39-win32.whl", hash = "sha256:6e5b80f46487636368e31b61461940e3999986359a78660a50dfdd17dd72017c"}, - {file = "duckdb-0.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:e6142a220180dbeea4f341708bd5f9501c5c962ce7ef47c1cadf5e8810b4cb13"}, - {file = "duckdb-0.9.2.tar.gz", hash = "sha256:3843afeab7c3fc4a4c0b53686a4cc1d9cdbdadcbb468d60fef910355ecafd447"}, - {file = "duckdb-0.10.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:bd0ffb3fddef0f72a150e4d76e10942a84a1a0447d10907df1621b90d6668060"}, - {file = "duckdb-0.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f3d709d5c7c1a12b5e10d0b05fa916c670cd2b50178e3696faa0cc16048a1745"}, - {file = "duckdb-0.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9114aa22ec5d591a20ce5184be90f49d8e5b5348ceaab21e102c54560d07a5f8"}, - {file = "duckdb-0.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77a37877efadf39caf7cadde0f430fedf762751b9c54750c821e2f1316705a21"}, - {file = "duckdb-0.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87cbc9e1d9c3fc9f14307bea757f99f15f46843c0ab13a6061354410824ed41f"}, - {file = "duckdb-0.10.0-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f0bfec79fed387201550517d325dff4fad2705020bc139d936cab08b9e845662"}, - {file = "duckdb-0.10.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c5622134d2d9796b15e09de810e450859d4beb46d9b861357ec9ae40a61b775c"}, - {file = "duckdb-0.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:089ee8e831ccaef1b73fc89c43b661567175eed0115454880bafed5e35cda702"}, - {file = "duckdb-0.10.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a05af63747f1d7021995f0811c333dee7316cec3b06c0d3e4741b9bdb678dd21"}, - {file = "duckdb-0.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:072d6eba5d8a59e0069a8b5b4252fed8a21f9fe3f85a9129d186a39b3d0aea03"}, - {file = "duckdb-0.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a77b85668f59b919042832e4659538337f1c7f197123076c5311f1c9cf077df7"}, - {file = "duckdb-0.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96a666f1d2da65d03199a977aec246920920a5ea1da76b70ae02bd4fb1ffc48c"}, - {file = "duckdb-0.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ec76a4262b783628d26612d184834852d9c92fb203e91af789100c17e3d7173"}, - {file = "duckdb-0.10.0-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:009dd9d2cdbd3b061a9efbdfc79f2d1a8377bcf49f1e5f430138621f8c083a6c"}, - {file = "duckdb-0.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:878f06766088090dad4a2e5ee0081555242b2e8dcb29415ecc97e388cf0cf8d8"}, - {file = "duckdb-0.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:713ff0a1fb63a6d60f454acf67f31656549fb5d63f21ac68314e4f522daa1a89"}, - {file = "duckdb-0.10.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:9c0ee450dfedfb52dd4957244e31820feef17228da31af6d052979450a80fd19"}, - {file = "duckdb-0.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ff79b2ea9994398b545c0d10601cd73565fbd09f8951b3d8003c7c5c0cebc7cb"}, - {file = "duckdb-0.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6bdf1aa71b924ef651062e6b8ff9981ad85bec89598294af8a072062c5717340"}, - {file = "duckdb-0.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0265bbc8216be3ced7b377ba8847128a3fc0ef99798a3c4557c1b88e3a01c23"}, - {file = "duckdb-0.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:1d418a315a07707a693bd985274c0f8c4dd77015d9ef5d8d3da4cc1942fd82e0"}, - {file = "duckdb-0.10.0-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2828475a292e68c71855190b818aded6bce7328f79e38c04a0c75f8f1c0ceef0"}, - {file = "duckdb-0.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c3aaeaae2eba97035c65f31ffdb18202c951337bf2b3d53d77ce1da8ae2ecf51"}, - {file = "duckdb-0.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:c51790aaaea97d8e4a58a114c371ed8d2c4e1ca7cbf29e3bdab6d8ccfc5afc1e"}, - {file = "duckdb-0.10.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8af1ae7cc77a12206b6c47ade191882cc8f49f750bb3e72bb86ac1d4fa89926a"}, - {file = "duckdb-0.10.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa4f7e8e8dc0e376aeb280b83f2584d0e25ec38985c27d19f3107b2edc4f4a97"}, - {file = "duckdb-0.10.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28ae942a79fad913defa912b56483cd7827a4e7721f4ce4bc9025b746ecb3c89"}, - {file = "duckdb-0.10.0-cp37-cp37m-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:01b57802898091455ca2a32c1335aac1e398da77c99e8a96a1e5de09f6a0add9"}, - {file = "duckdb-0.10.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:52e1ad4a55fa153d320c367046b9500578192e01c6d04308ba8b540441736f2c"}, - {file = "duckdb-0.10.0-cp37-cp37m-win_amd64.whl", hash = "sha256:904c47d04095af745e989c853f0bfc0776913dfc40dfbd2da7afdbbb5f67fed0"}, - {file = "duckdb-0.10.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:184ae7ea5874f3b8fa51ab0f1519bdd088a0b78c32080ee272b1d137e2c8fd9c"}, - {file = "duckdb-0.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bd33982ecc9bac727a032d6cedced9f19033cbad56647147408891eb51a6cb37"}, - {file = "duckdb-0.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f59bf0949899105dd5f8864cb48139bfb78454a8c017b8258ba2b5e90acf7afc"}, - {file = "duckdb-0.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:395f3b18948001e35dceb48a4423d574e38656606d033eef375408b539e7b076"}, - {file = "duckdb-0.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b8eb2b803be7ee1df70435c33b03a4598cdaf676cd67ad782b288dcff65d781"}, - {file = "duckdb-0.10.0-cp38-cp38-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:31b2ddd331801064326c8e3587a4db8a31d02aef11332c168f45b3bd92effb41"}, - {file = "duckdb-0.10.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c8b89e76a041424b8c2026c5dc1f74b53fbbc6c6f650d563259885ab2e7d093d"}, - {file = "duckdb-0.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:79084a82f16c0a54f6bfb7ded5600400c2daa90eb0d83337d81a56924eaee5d4"}, - {file = "duckdb-0.10.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:79799b3a270dcd9070f677ba510f1e66b112df3068425691bac97c5e278929c7"}, - {file = "duckdb-0.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e8fc394bfe3434920cdbcfbdd0ac3ba40902faa1dbda088db0ba44003a45318a"}, - {file = "duckdb-0.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c116605551b4abf5786243a59bcef02bd69cc51837d0c57cafaa68cdc428aa0c"}, - {file = "duckdb-0.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3191170c3b0a43b0c12644800326f5afdea00d5a4621d59dbbd0c1059139e140"}, - {file = "duckdb-0.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fee69a50eb93c72dc77e7ab1fabe0c38d21a52c5da44a86aa217081e38f9f1bd"}, - {file = "duckdb-0.10.0-cp39-cp39-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:c5f449e87dacb16b0d145dbe65fa6fdb5a55b2b6911a46d74876e445dd395bac"}, - {file = "duckdb-0.10.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4487d0df221b17ea4177ad08131bc606b35f25cfadf890987833055b9d10cdf6"}, - {file = "duckdb-0.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:c099ae2ff8fe939fda62da81704f91e2f92ac45e48dc0e37c679c9d243d01e65"}, - {file = "duckdb-0.10.0.tar.gz", hash = "sha256:c02bcc128002aa79e3c9d89b9de25e062d1096a8793bc0d7932317b7977f6845"}, -] -email-validator = [ - {file = "email_validator-1.3.1-py2.py3-none-any.whl", hash = "sha256:49a72f5fa6ed26be1c964f0567d931d10bf3fdeeacdf97bc26ef1cd2a44e0bda"}, - {file = "email_validator-1.3.1.tar.gz", hash = "sha256:d178c5c6fa6c6824e9b04f199cf23e79ac15756786573c190d2ad13089411ad2"}, -] -enlighten = [ - {file = "enlighten-1.11.2-py2.py3-none-any.whl", hash = "sha256:98c9eb20e022b6a57f1c8d4f17e16760780b6881e6d658c40f52d21255ea45f3"}, - {file = "enlighten-1.11.2.tar.gz", hash = "sha256:9284861dee5a272e0e1a3758cd3f3b7180b1bd1754875da76876f2a7f46ccb61"}, -] -et-xmlfile = [ - {file = "et_xmlfile-1.1.0-py3-none-any.whl", hash = "sha256:a2ba85d1d6a74ef63837eed693bcb89c3f752169b0e3e7ae5b16ca5e1b3deada"}, - {file = "et_xmlfile-1.1.0.tar.gz", hash = "sha256:8eb9e2bc2f8c97e37a2dc85a09ecdcdec9d8a396530a6d5a33b30b9a92da0c5c"}, -] -exceptiongroup = [ - {file = "exceptiongroup-1.1.3-py3-none-any.whl", hash = "sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3"}, - {file = "exceptiongroup-1.1.3.tar.gz", hash = "sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9"}, -] -fastembed = [ - {file = "fastembed-0.1.1-py3-none-any.whl", hash = "sha256:131413ae52cd72f4c8cced7a675f8269dbfd1a852abade3c815e265114bcc05a"}, - {file = "fastembed-0.1.1.tar.gz", hash = "sha256:f7e524ee4f74bb8aad16be5b687d1f77f608d40e96e292c87881dc36baf8f4c7"}, -] -filelock = [ - {file = "filelock-3.12.3-py3-none-any.whl", hash = "sha256:f067e40ccc40f2b48395a80fcbd4728262fab54e232e090a4063ab804179efeb"}, - {file = "filelock-3.12.3.tar.gz", hash = "sha256:0ecc1dd2ec4672a10c8550a8182f1bd0c0a5088470ecd5a125e45f49472fac3d"}, -] -flake8 = [ - {file = "flake8-5.0.4-py2.py3-none-any.whl", hash = "sha256:7a1cf6b73744f5806ab95e526f6f0d8c01c66d7bbe349562d22dfca20610b248"}, - {file = "flake8-5.0.4.tar.gz", hash = "sha256:6fbe320aad8d6b95cec8b8e47bc933004678dc63095be98528b7bdd2a9f510db"}, -] -flake8-bugbear = [ - {file = "flake8-bugbear-22.12.6.tar.gz", hash = "sha256:4cdb2c06e229971104443ae293e75e64c6107798229202fbe4f4091427a30ac0"}, - {file = "flake8_bugbear-22.12.6-py3-none-any.whl", hash = "sha256:b69a510634f8a9c298dfda2b18a8036455e6b19ecac4fe582e4d7a0abfa50a30"}, -] -flake8-builtins = [ - {file = "flake8-builtins-1.5.3.tar.gz", hash = "sha256:09998853b2405e98e61d2ff3027c47033adbdc17f9fe44ca58443d876eb00f3b"}, - {file = "flake8_builtins-1.5.3-py2.py3-none-any.whl", hash = "sha256:7706babee43879320376861897e5d1468e396a40b8918ed7bccf70e5f90b8687"}, -] -flake8-encodings = [ - {file = "flake8_encodings-0.5.0.post1-py3-none-any.whl", hash = "sha256:d2fecca0e89ba09c86e5d61cf6bdb1b337f0d74746aac67bbcf0c517b4cb6cba"}, - {file = "flake8_encodings-0.5.0.post1.tar.gz", hash = "sha256:082c0163325c85b438a8106e876283b5ed3cbfc53e68d89130d70be8be4c9977"}, -] -flake8-helper = [ - {file = "flake8_helper-0.2.1-py3-none-any.whl", hash = "sha256:9123cdf351ad32ee8a51b85036052302c478122d62fb512c0773e111b3d05241"}, - {file = "flake8_helper-0.2.1.tar.gz", hash = "sha256:479f86d1c52df8e49ff876ecd3873242699f93eeece7e6675cdca9c37c9b0a16"}, -] -flake8-tidy-imports = [ - 
{file = "flake8_tidy_imports-4.10.0-py3-none-any.whl", hash = "sha256:b0387fb2ea200441bd142309e716fb7b8f4b0937bdf5f8b7c0c118a5f5e2b8ed"}, - {file = "flake8_tidy_imports-4.10.0.tar.gz", hash = "sha256:bd6cf86465402d2b86903009b748d85a628e599e17b76e810c9857e3a2815173"}, -] -flask = [ - {file = "Flask-2.2.5-py3-none-any.whl", hash = "sha256:58107ed83443e86067e41eff4631b058178191a355886f8e479e347fa1285fdf"}, - {file = "Flask-2.2.5.tar.gz", hash = "sha256:edee9b0a7ff26621bd5a8c10ff484ae28737a2410d99b0bb9a6850c7fb977aa0"}, -] -flask-appbuilder = [ - {file = "Flask-AppBuilder-4.3.10.tar.gz", hash = "sha256:4173c878e56b81c6acac5e3c80c133f4183f43442fd944552bd9f4023f5baceb"}, - {file = "Flask_AppBuilder-4.3.10-py3-none-any.whl", hash = "sha256:c0af506e1a68e7ee14f26a16fda829f1a14f8343654c30bdbb1351d23c545df9"}, -] -flask-babel = [ - {file = "Flask-Babel-2.0.0.tar.gz", hash = "sha256:f9faf45cdb2e1a32ea2ec14403587d4295108f35017a7821a2b1acb8cfd9257d"}, - {file = "Flask_Babel-2.0.0-py3-none-any.whl", hash = "sha256:e6820a052a8d344e178cdd36dd4bb8aea09b4bda3d5f9fa9f008df2c7f2f5468"}, -] -flask-caching = [ - {file = "Flask-Caching-2.0.2.tar.gz", hash = "sha256:24b60c552d59a9605cc1b6a42c56cdb39a82a28dab4532bbedb9222ae54ecb4e"}, - {file = "Flask_Caching-2.0.2-py3-none-any.whl", hash = "sha256:19571f2570e9b8dd9dd9d2f49d7cbee69c14ebe8cc001100b1eb98c379dd80ad"}, -] -flask-jwt-extended = [ - {file = "Flask-JWT-Extended-4.5.2.tar.gz", hash = "sha256:ba56245ba43b71c8ae936784b867625dce8b9956faeedec2953222e57942fb0b"}, - {file = "Flask_JWT_Extended-4.5.2-py2.py3-none-any.whl", hash = "sha256:e0ef23d8c863746bd141046167073699e1a7b03c97169cbba70f05b8d9cd6b9e"}, -] -flask-limiter = [ - {file = "Flask-Limiter-3.5.0.tar.gz", hash = "sha256:13a3491b994c49f7cb4706587a38ca47e8162b576530472df38be68104f299c0"}, - {file = "Flask_Limiter-3.5.0-py3-none-any.whl", hash = "sha256:dbda4174f44e6cb858c6eb75e7488186f2977dd5d33d7028ba1aabf179de1bee"}, -] -flask-login = [ - {file = "Flask-Login-0.6.2.tar.gz", hash = "sha256:c0a7baa9fdc448cdd3dd6f0939df72eec5177b2f7abe6cb82fc934d29caac9c3"}, - {file = "Flask_Login-0.6.2-py3-none-any.whl", hash = "sha256:1ef79843f5eddd0f143c2cd994c1b05ac83c0401dc6234c143495af9a939613f"}, -] -flask-session = [ - {file = "Flask-Session-0.5.0.tar.gz", hash = "sha256:190875e6aebf2953c6803d42379ef3b934bc209ef8ef006f97aecb08f5aaeb86"}, - {file = "flask_session-0.5.0-py3-none-any.whl", hash = "sha256:1619bcbc16f04f64e90f8e0b17145ba5c9700090bb1294e889956c1282d58631"}, -] -flask-sqlalchemy = [ - {file = "Flask-SQLAlchemy-2.5.1.tar.gz", hash = "sha256:2bda44b43e7cacb15d4e05ff3cc1f8bc97936cc464623424102bfc2c35e95912"}, - {file = "Flask_SQLAlchemy-2.5.1-py2.py3-none-any.whl", hash = "sha256:f12c3d4cc5cc7fdcc148b9527ea05671718c3ea45d50c7e732cceb33f574b390"}, -] -flask-wtf = [ - {file = "Flask-WTF-1.1.1.tar.gz", hash = "sha256:41c4244e9ae626d63bed42ae4785b90667b885b1535d5a4095e1f63060d12aa9"}, - {file = "Flask_WTF-1.1.1-py3-none-any.whl", hash = "sha256:7887d6f1ebb3e17bf648647422f0944c9a469d0fcf63e3b66fb9a83037e38b2c"}, -] -flatbuffers = [ - {file = "flatbuffers-23.5.26-py2.py3-none-any.whl", hash = "sha256:c0ff356da363087b915fde4b8b45bdda73432fc17cddb3c8157472eab1422ad1"}, - {file = "flatbuffers-23.5.26.tar.gz", hash = "sha256:9ea1144cac05ce5d86e2859f431c6cd5e66cd9c78c558317c7955fb8d4c78d89"}, -] -frozenlist = [ - {file = "frozenlist-1.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:764226ceef3125e53ea2cb275000e309c0aa5464d43bd72abd661e27fffc26ab"}, - {file = 
"frozenlist-1.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d6484756b12f40003c6128bfcc3fa9f0d49a687e171186c2d85ec82e3758c559"}, - {file = "frozenlist-1.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9ac08e601308e41eb533f232dbf6b7e4cea762f9f84f6357136eed926c15d12c"}, - {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d081f13b095d74b67d550de04df1c756831f3b83dc9881c38985834387487f1b"}, - {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:71932b597f9895f011f47f17d6428252fc728ba2ae6024e13c3398a087c2cdea"}, - {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:981b9ab5a0a3178ff413bca62526bb784249421c24ad7381e39d67981be2c326"}, - {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e41f3de4df3e80de75845d3e743b3f1c4c8613c3997a912dbf0229fc61a8b963"}, - {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6918d49b1f90821e93069682c06ffde41829c346c66b721e65a5c62b4bab0300"}, - {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0e5c8764c7829343d919cc2dfc587a8db01c4f70a4ebbc49abde5d4b158b007b"}, - {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8d0edd6b1c7fb94922bf569c9b092ee187a83f03fb1a63076e7774b60f9481a8"}, - {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e29cda763f752553fa14c68fb2195150bfab22b352572cb36c43c47bedba70eb"}, - {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:0c7c1b47859ee2cac3846fde1c1dc0f15da6cec5a0e5c72d101e0f83dcb67ff9"}, - {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:901289d524fdd571be1c7be054f48b1f88ce8dddcbdf1ec698b27d4b8b9e5d62"}, - {file = "frozenlist-1.4.0-cp310-cp310-win32.whl", hash = "sha256:1a0848b52815006ea6596c395f87449f693dc419061cc21e970f139d466dc0a0"}, - {file = "frozenlist-1.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:b206646d176a007466358aa21d85cd8600a415c67c9bd15403336c331a10d956"}, - {file = "frozenlist-1.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:de343e75f40e972bae1ef6090267f8260c1446a1695e77096db6cfa25e759a95"}, - {file = "frozenlist-1.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ad2a9eb6d9839ae241701d0918f54c51365a51407fd80f6b8289e2dfca977cc3"}, - {file = "frozenlist-1.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bd7bd3b3830247580de99c99ea2a01416dfc3c34471ca1298bccabf86d0ff4dc"}, - {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bdf1847068c362f16b353163391210269e4f0569a3c166bc6a9f74ccbfc7e839"}, - {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38461d02d66de17455072c9ba981d35f1d2a73024bee7790ac2f9e361ef1cd0c"}, - {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5a32087d720c608f42caed0ef36d2b3ea61a9d09ee59a5142d6070da9041b8f"}, - {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dd65632acaf0d47608190a71bfe46b209719bf2beb59507db08ccdbe712f969b"}, - {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:261b9f5d17cac914531331ff1b1d452125bf5daa05faf73b71d935485b0c510b"}, - {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b89ac9768b82205936771f8d2eb3ce88503b1556324c9f903e7156669f521472"}, - {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:008eb8b31b3ea6896da16c38c1b136cb9fec9e249e77f6211d479db79a4eaf01"}, - {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e74b0506fa5aa5598ac6a975a12aa8928cbb58e1f5ac8360792ef15de1aa848f"}, - {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:490132667476f6781b4c9458298b0c1cddf237488abd228b0b3650e5ecba7467"}, - {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:76d4711f6f6d08551a7e9ef28c722f4a50dd0fc204c56b4bcd95c6cc05ce6fbb"}, - {file = "frozenlist-1.4.0-cp311-cp311-win32.whl", hash = "sha256:a02eb8ab2b8f200179b5f62b59757685ae9987996ae549ccf30f983f40602431"}, - {file = "frozenlist-1.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:515e1abc578dd3b275d6a5114030b1330ba044ffba03f94091842852f806f1c1"}, - {file = "frozenlist-1.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:f0ed05f5079c708fe74bf9027e95125334b6978bf07fd5ab923e9e55e5fbb9d3"}, - {file = "frozenlist-1.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ca265542ca427bf97aed183c1676e2a9c66942e822b14dc6e5f42e038f92a503"}, - {file = "frozenlist-1.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:491e014f5c43656da08958808588cc6c016847b4360e327a62cb308c791bd2d9"}, - {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17ae5cd0f333f94f2e03aaf140bb762c64783935cc764ff9c82dff626089bebf"}, - {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e78fb68cf9c1a6aa4a9a12e960a5c9dfbdb89b3695197aa7064705662515de2"}, - {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5655a942f5f5d2c9ed93d72148226d75369b4f6952680211972a33e59b1dfdc"}, - {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c11b0746f5d946fecf750428a95f3e9ebe792c1ee3b1e96eeba145dc631a9672"}, - {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e66d2a64d44d50d2543405fb183a21f76b3b5fd16f130f5c99187c3fb4e64919"}, - {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:88f7bc0fcca81f985f78dd0fa68d2c75abf8272b1f5c323ea4a01a4d7a614efc"}, - {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5833593c25ac59ede40ed4de6d67eb42928cca97f26feea219f21d0ed0959b79"}, - {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:fec520865f42e5c7f050c2a79038897b1c7d1595e907a9e08e3353293ffc948e"}, - {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:b826d97e4276750beca7c8f0f1a4938892697a6bcd8ec8217b3312dad6982781"}, - {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ceb6ec0a10c65540421e20ebd29083c50e6d1143278746a4ef6bcf6153171eb8"}, - {file = "frozenlist-1.4.0-cp38-cp38-win32.whl", hash = "sha256:2b8bcf994563466db019fab287ff390fffbfdb4f905fc77bc1c1d604b1c689cc"}, - {file = "frozenlist-1.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:a6c8097e01886188e5be3e6b14e94ab365f384736aa1fca6a0b9e35bd4a30bc7"}, - {file = "frozenlist-1.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:6c38721585f285203e4b4132a352eb3daa19121a035f3182e08e437cface44bf"}, - {file = "frozenlist-1.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a0c6da9aee33ff0b1a451e867da0c1f47408112b3391dd43133838339e410963"}, - {file = "frozenlist-1.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:93ea75c050c5bb3d98016b4ba2497851eadf0ac154d88a67d7a6816206f6fa7f"}, - {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f61e2dc5ad442c52b4887f1fdc112f97caeff4d9e6ebe78879364ac59f1663e1"}, - {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa384489fefeb62321b238e64c07ef48398fe80f9e1e6afeff22e140e0850eef"}, - {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:10ff5faaa22786315ef57097a279b833ecab1a0bfb07d604c9cbb1c4cdc2ed87"}, - {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:007df07a6e3eb3e33e9a1fe6a9db7af152bbd8a185f9aaa6ece10a3529e3e1c6"}, - {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f4f399d28478d1f604c2ff9119907af9726aed73680e5ed1ca634d377abb087"}, - {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c5374b80521d3d3f2ec5572e05adc94601985cc526fb276d0c8574a6d749f1b3"}, - {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ce31ae3e19f3c902de379cf1323d90c649425b86de7bbdf82871b8a2a0615f3d"}, - {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7211ef110a9194b6042449431e08c4d80c0481e5891e58d429df5899690511c2"}, - {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:556de4430ce324c836789fa4560ca62d1591d2538b8ceb0b4f68fb7b2384a27a"}, - {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7645a8e814a3ee34a89c4a372011dcd817964ce8cb273c8ed6119d706e9613e3"}, - {file = "frozenlist-1.4.0-cp39-cp39-win32.whl", hash = "sha256:19488c57c12d4e8095a922f328df3f179c820c212940a498623ed39160bc3c2f"}, - {file = "frozenlist-1.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:6221d84d463fb110bdd7619b69cb43878a11d51cbb9394ae3105d082d5199167"}, - {file = "frozenlist-1.4.0.tar.gz", hash = "sha256:09163bdf0b2907454042edb19f887c6d33806adc71fbd54afc14908bfdc22251"}, -] -fsspec = [ - {file = "fsspec-2024.2.0-py3-none-any.whl", hash = "sha256:817f969556fa5916bc682e02ca2045f96ff7f586d45110fcb76022063ad2c7d8"}, - {file = "fsspec-2024.2.0.tar.gz", hash = "sha256:b6ad1a679f760dda52b1168c859d01b7b80648ea6f7f7c7f5a8a91dc3f3ecb84"}, -] -future = [ - {file = "future-0.18.3.tar.gz", hash = "sha256:34a17436ed1e96697a86f9de3d15a3b0be01d8bc8de9c1dffd59fb8234ed5307"}, -] -gcsfs = [ - {file = "gcsfs-2024.2.0-py2.py3-none-any.whl", hash = "sha256:20bf70cc81d580474dd299d55e1ffcf8b3e81721aeb562e148ca0a3c900d0421"}, - {file = "gcsfs-2024.2.0.tar.gz", hash = "sha256:f7cffd7cae2fb50c56ef883f8aef9792be045b5059f06c1902c3a6151509f506"}, -] -gitdb = [ - {file = "gitdb-4.0.10-py3-none-any.whl", hash = "sha256:c286cf298426064079ed96a9e4a9d39e7f3e9bf15ba60701e95f5492f28415c7"}, - {file = "gitdb-4.0.10.tar.gz", hash = "sha256:6eb990b69df4e15bad899ea868dc46572c3f75339735663b81de79b06f17eb9a"}, -] -gitpython = [ - {file = "GitPython-3.1.34-py3-none-any.whl", hash = "sha256:5d3802b98a3bae1c2b8ae0e1ff2e4aa16bcdf02c145da34d092324f599f01395"}, - {file = "GitPython-3.1.34.tar.gz", hash = 
"sha256:85f7d365d1f6bf677ae51039c1ef67ca59091c7ebd5a3509aa399d4eda02d6dd"}, -] -giturlparse = [ - {file = "giturlparse-0.11.1-py2.py3-none-any.whl", hash = "sha256:6422f25c8ca563e1a3cb6b85862e48614be804cd1334e6d84be5630eb26b343f"}, - {file = "giturlparse-0.11.1.tar.gz", hash = "sha256:cdbe0c062096c69e00f08397826dddebc1f73bc15b793994579c13aafc70c990"}, -] -google-api-core = [ - {file = "google-api-core-2.11.1.tar.gz", hash = "sha256:25d29e05a0058ed5f19c61c0a78b1b53adea4d9364b464d014fbda941f6d1c9a"}, - {file = "google_api_core-2.11.1-py3-none-any.whl", hash = "sha256:d92a5a92dc36dd4f4b9ee4e55528a90e432b059f93aee6ad857f9de8cc7ae94a"}, -] -google-api-python-client = [ - {file = "google-api-python-client-2.97.0.tar.gz", hash = "sha256:48277291894876a1ca7ed4127e055e81f81e6343ced1b544a7200ae2c119dcd7"}, - {file = "google_api_python_client-2.97.0-py2.py3-none-any.whl", hash = "sha256:5215f4cd577753fc4192ccfbe0bb8b55d4bb5fd68fa6268ac5cf271b6305de31"}, -] -google-auth = [ - {file = "google-auth-2.22.0.tar.gz", hash = "sha256:164cba9af4e6e4e40c3a4f90a1a6c12ee56f14c0b4868d1ca91b32826ab334ce"}, - {file = "google_auth-2.22.0-py2.py3-none-any.whl", hash = "sha256:d61d1b40897407b574da67da1a833bdc10d5a11642566e506565d1b1a46ba873"}, -] -google-auth-httplib2 = [ - {file = "google-auth-httplib2-0.1.0.tar.gz", hash = "sha256:a07c39fd632becacd3f07718dfd6021bf396978f03ad3ce4321d060015cc30ac"}, - {file = "google_auth_httplib2-0.1.0-py2.py3-none-any.whl", hash = "sha256:31e49c36c6b5643b57e82617cb3e021e3e1d2df9da63af67252c02fa9c1f4a10"}, -] -google-auth-oauthlib = [ - {file = "google-auth-oauthlib-1.0.0.tar.gz", hash = "sha256:e375064964820b47221a7e1b7ee1fd77051b6323c3f9e3e19785f78ab67ecfc5"}, - {file = "google_auth_oauthlib-1.0.0-py2.py3-none-any.whl", hash = "sha256:95880ca704928c300f48194d1770cf5b1462835b6e49db61445a520f793fd5fb"}, -] -google-cloud-bigquery = [ - {file = "google-cloud-bigquery-3.11.4.tar.gz", hash = "sha256:697df117241a2283bcbb93b21e10badc14e51c9a90800d2a7e1a3e1c7d842974"}, - {file = "google_cloud_bigquery-3.11.4-py2.py3-none-any.whl", hash = "sha256:5fa7897743a0ed949ade25a0942fc9e7557d8fce307c6f8a76d1b604cf27f1b1"}, -] -google-cloud-core = [ - {file = "google-cloud-core-2.3.3.tar.gz", hash = "sha256:37b80273c8d7eee1ae816b3a20ae43585ea50506cb0e60f3cf5be5f87f1373cb"}, - {file = "google_cloud_core-2.3.3-py2.py3-none-any.whl", hash = "sha256:fbd11cad3e98a7e5b0343dc07cb1039a5ffd7a5bb96e1f1e27cee4bda4a90863"}, -] -google-cloud-dataproc = [ - {file = "google-cloud-dataproc-5.4.3.tar.gz", hash = "sha256:d9c77c52aa5ddf52ae657736dbfb5312402933f72bab8480fc2d2afe98697402"}, - {file = "google_cloud_dataproc-5.4.3-py2.py3-none-any.whl", hash = "sha256:9cfff56cb53621cdffd0a3d6b10701e886e0a8ad54891e6c223eb67c0ff753ad"}, -] -google-cloud-storage = [ - {file = "google-cloud-storage-2.10.0.tar.gz", hash = "sha256:934b31ead5f3994e5360f9ff5750982c5b6b11604dc072bc452c25965e076dc7"}, - {file = "google_cloud_storage-2.10.0-py2.py3-none-any.whl", hash = "sha256:9433cf28801671de1c80434238fb1e7e4a1ba3087470e90f70c928ea77c2b9d7"}, -] -google-crc32c = [ - {file = "google-crc32c-1.5.0.tar.gz", hash = "sha256:89284716bc6a5a415d4eaa11b1726d2d60a0cd12aadf5439828353662ede9dd7"}, - {file = "google_crc32c-1.5.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:596d1f98fc70232fcb6590c439f43b350cb762fb5d61ce7b0e9db4539654cc13"}, - {file = "google_crc32c-1.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:be82c3c8cfb15b30f36768797a640e800513793d6ae1724aaaafe5bf86f8f346"}, - {file = 
"google_crc32c-1.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:461665ff58895f508e2866824a47bdee72497b091c730071f2b7575d5762ab65"}, - {file = "google_crc32c-1.5.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2096eddb4e7c7bdae4bd69ad364e55e07b8316653234a56552d9c988bd2d61b"}, - {file = "google_crc32c-1.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:116a7c3c616dd14a3de8c64a965828b197e5f2d121fedd2f8c5585c547e87b02"}, - {file = "google_crc32c-1.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5829b792bf5822fd0a6f6eb34c5f81dd074f01d570ed7f36aa101d6fc7a0a6e4"}, - {file = "google_crc32c-1.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:64e52e2b3970bd891309c113b54cf0e4384762c934d5ae56e283f9a0afcd953e"}, - {file = "google_crc32c-1.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:02ebb8bf46c13e36998aeaad1de9b48f4caf545e91d14041270d9dca767b780c"}, - {file = "google_crc32c-1.5.0-cp310-cp310-win32.whl", hash = "sha256:2e920d506ec85eb4ba50cd4228c2bec05642894d4c73c59b3a2fe20346bd00ee"}, - {file = "google_crc32c-1.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:07eb3c611ce363c51a933bf6bd7f8e3878a51d124acfc89452a75120bc436289"}, - {file = "google_crc32c-1.5.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:cae0274952c079886567f3f4f685bcaf5708f0a23a5f5216fdab71f81a6c0273"}, - {file = "google_crc32c-1.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1034d91442ead5a95b5aaef90dbfaca8633b0247d1e41621d1e9f9db88c36298"}, - {file = "google_crc32c-1.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c42c70cd1d362284289c6273adda4c6af8039a8ae12dc451dcd61cdabb8ab57"}, - {file = "google_crc32c-1.5.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8485b340a6a9e76c62a7dce3c98e5f102c9219f4cfbf896a00cf48caf078d438"}, - {file = "google_crc32c-1.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77e2fd3057c9d78e225fa0a2160f96b64a824de17840351b26825b0848022906"}, - {file = "google_crc32c-1.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183"}, - {file = "google_crc32c-1.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:a1fd716e7a01f8e717490fbe2e431d2905ab8aa598b9b12f8d10abebb36b04dd"}, - {file = "google_crc32c-1.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:72218785ce41b9cfd2fc1d6a017dc1ff7acfc4c17d01053265c41a2c0cc39b8c"}, - {file = "google_crc32c-1.5.0-cp311-cp311-win32.whl", hash = "sha256:66741ef4ee08ea0b2cc3c86916ab66b6aef03768525627fd6a1b34968b4e3709"}, - {file = "google_crc32c-1.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:ba1eb1843304b1e5537e1fca632fa894d6f6deca8d6389636ee5b4797affb968"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:98cb4d057f285bd80d8778ebc4fde6b4d509ac3f331758fb1528b733215443ae"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19e0a019d2c4dcc5e598cd4a4bc7b008546b0358bd322537c74ad47a5386884f"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02c65b9817512edc6a4ae7c7e987fea799d2e0ee40c53ec573a692bee24de876"}, - {file = 
"google_crc32c-1.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6ac08d24c1f16bd2bf5eca8eaf8304812f44af5cfe5062006ec676e7e1d50afc"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3359fc442a743e870f4588fcf5dcbc1bf929df1fad8fb9905cd94e5edb02e84c"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e986b206dae4476f41bcec1faa057851f3889503a70e1bdb2378d406223994a"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:de06adc872bcd8c2a4e0dc51250e9e65ef2ca91be023b9d13ebd67c2ba552e1e"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-win32.whl", hash = "sha256:d3515f198eaa2f0ed49f8819d5732d70698c3fa37384146079b3799b97667a94"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:67b741654b851abafb7bc625b6d1cdd520a379074e64b6a128e3b688c3c04740"}, - {file = "google_crc32c-1.5.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c02ec1c5856179f171e032a31d6f8bf84e5a75c45c33b2e20a3de353b266ebd8"}, - {file = "google_crc32c-1.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:edfedb64740750e1a3b16152620220f51d58ff1b4abceb339ca92e934775c27a"}, - {file = "google_crc32c-1.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84e6e8cd997930fc66d5bb4fde61e2b62ba19d62b7abd7a69920406f9ecca946"}, - {file = "google_crc32c-1.5.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a"}, - {file = "google_crc32c-1.5.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:998679bf62b7fb599d2878aa3ed06b9ce688b8974893e7223c60db155f26bd8d"}, - {file = "google_crc32c-1.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:83c681c526a3439b5cf94f7420471705bbf96262f49a6fe546a6db5f687a3d4a"}, - {file = "google_crc32c-1.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4c6fdd4fccbec90cc8a01fc00773fcd5fa28db683c116ee3cb35cd5da9ef6c37"}, - {file = "google_crc32c-1.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5ae44e10a8e3407dbe138984f21e536583f2bba1be9491239f942c2464ac0894"}, - {file = "google_crc32c-1.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:37933ec6e693e51a5b07505bd05de57eee12f3e8c32b07da7e73669398e6630a"}, - {file = "google_crc32c-1.5.0-cp38-cp38-win32.whl", hash = "sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4"}, - {file = "google_crc32c-1.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:74dea7751d98034887dbd821b7aae3e1d36eda111d6ca36c206c44478035709c"}, - {file = "google_crc32c-1.5.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c6c777a480337ac14f38564ac88ae82d4cd238bf293f0a22295b66eb89ffced7"}, - {file = "google_crc32c-1.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:759ce4851a4bb15ecabae28f4d2e18983c244eddd767f560165563bf9aefbc8d"}, - {file = "google_crc32c-1.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f13cae8cc389a440def0c8c52057f37359014ccbc9dc1f0827936bcd367c6100"}, - {file = "google_crc32c-1.5.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e560628513ed34759456a416bf86b54b2476c59144a9138165c9a1575801d0d9"}, - {file = "google_crc32c-1.5.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1674e4307fa3024fc897ca774e9c7562c957af85df55efe2988ed9056dc4e57"}, - {file = "google_crc32c-1.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = 
"sha256:278d2ed7c16cfc075c91378c4f47924c0625f5fc84b2d50d921b18b7975bd210"}, - {file = "google_crc32c-1.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d5280312b9af0976231f9e317c20e4a61cd2f9629b7bfea6a693d1878a264ebd"}, - {file = "google_crc32c-1.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8b87e1a59c38f275c0e3676fc2ab6d59eccecfd460be267ac360cc31f7bcde96"}, - {file = "google_crc32c-1.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7c074fece789b5034b9b1404a1f8208fc2d4c6ce9decdd16e8220c5a793e6f61"}, - {file = "google_crc32c-1.5.0-cp39-cp39-win32.whl", hash = "sha256:7f57f14606cd1dd0f0de396e1e53824c371e9544a822648cd76c034d209b559c"}, - {file = "google_crc32c-1.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:a2355cba1f4ad8b6988a4ca3feed5bff33f6af2d7f134852cf279c2aebfde541"}, - {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f314013e7dcd5cf45ab1945d92e713eec788166262ae8deb2cfacd53def27325"}, - {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b747a674c20a67343cb61d43fdd9207ce5da6a99f629c6e2541aa0e89215bcd"}, - {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8f24ed114432de109aa9fd317278518a5af2d31ac2ea6b952b2f7782b43da091"}, - {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8667b48e7a7ef66afba2c81e1094ef526388d35b873966d8a9a447974ed9178"}, - {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:1c7abdac90433b09bad6c43a43af253e688c9cfc1c86d332aed13f9a7c7f65e2"}, - {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6f998db4e71b645350b9ac28a2167e6632c239963ca9da411523bb439c5c514d"}, - {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c99616c853bb585301df6de07ca2cadad344fd1ada6d62bb30aec05219c45d2"}, - {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ad40e31093a4af319dadf503b2467ccdc8f67c72e4bcba97f8c10cb078207b5"}, - {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd67cf24a553339d5062eff51013780a00d6f97a39ca062781d06b3a73b15462"}, - {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:398af5e3ba9cf768787eef45c803ff9614cc3e22a5b2f7d7ae116df8b11e3314"}, - {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b1f8133c9a275df5613a451e73f36c2aea4fe13c5c8997e22cf355ebd7bd0728"}, - {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ba053c5f50430a3fcfd36f75aff9caeba0440b2d076afdb79a318d6ca245f88"}, - {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:272d3892a1e1a2dbc39cc5cde96834c236d5327e2122d3aaa19f6614531bb6eb"}, - {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:635f5d4dd18758a1fbd1049a8e8d2fee4ffed124462d837d1a02a0e009c3ab31"}, - {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c672d99a345849301784604bfeaeba4db0c7aae50b95be04dd651fd2a7310b93"}, -] -google-re2 = [ - {file = "google-re2-1.1.tar.gz", hash = "sha256:d3a9467ee52b46ac77ca928f6d0cbeaccfd92f03ca0f0f65b9df6a95184f3a1c"}, - {file = "google_re2-1.1-1-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:874d2e36dfa506b03d4f9c4aef1701a65304f4004c96c7edac7d8aea08fe193e"}, - {file = "google_re2-1.1-1-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:b66eb84850afdce09aabca40bcd6f2a0e96178a1b4990d555678edb1f59bf255"}, - {file = "google_re2-1.1-1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:c461640a07db26dc2b51f43de607b7520e7debaf4f6a000f796a3c0196ca52af"}, - {file = "google_re2-1.1-1-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:7f9ba69eaee6e7a9f5ddfb919bf1a866af14a18b26a179e3fb1a6fe3d0cbf349"}, - {file = "google_re2-1.1-1-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:f95cf16739cc3ea63728366881221b119f2322b4b739b7da6522d45a68792cea"}, - {file = "google_re2-1.1-1-cp310-cp310-macosx_13_0_x86_64.whl", hash = "sha256:9fb56a41250191298e6a2859b0fdea1e83330c9870fe8d84e5836c506ae46e96"}, - {file = "google_re2-1.1-1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fb22ea995564d87baf4a4bfbb3ca024be913683a710f4f0dc9c94dc663afab20"}, - {file = "google_re2-1.1-1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:19b3f0bfbb2a2ca58ed0aaa9356d07a5c0921383a6dbeca086b2b74472f5ee08"}, - {file = "google_re2-1.1-1-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:34fd7f97b84af7453cf05b25adfe2491ba3cef1ca548ac2907efa63d3510954d"}, - {file = "google_re2-1.1-1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3e029664192d8d30f7c977706183ef483e82ca239302272df74e01d2e22897ca"}, - {file = "google_re2-1.1-1-cp310-cp310-win32.whl", hash = "sha256:41a8f222f9839d059d37efd28e4deec203502d7e39c3759d83d6a33deadf1d2e"}, - {file = "google_re2-1.1-1-cp310-cp310-win_amd64.whl", hash = "sha256:6141d569fdf72aa693f040ba05c469036587395af07ff419b9a3c009d6ffefd3"}, - {file = "google_re2-1.1-1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a2d03f6aaf22788ba13a770f0d183b8eebe55545bcbb6e4c41dcccac7ded014d"}, - {file = "google_re2-1.1-1-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:a98f15fd9c31bf80d368698447191a2e9703880b305dbf34d9a63ce634b8a557"}, - {file = "google_re2-1.1-1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:42128916cc2966623832aabbd224c88e862d1c531d6bc49ab141f565e6321a90"}, - {file = "google_re2-1.1-1-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:6e27986a166903ad7000635f6faed8ab5072d687f822ac9f692c40b2470aebcf"}, - {file = "google_re2-1.1-1-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:5e9edcd743a830d0c0b2729201e42ab86fceef8f4086df65563f482e4544359e"}, - {file = "google_re2-1.1-1-cp311-cp311-macosx_13_0_x86_64.whl", hash = "sha256:d33145bbfd32e916f1c911cd9225be5364a36c3959742a0cc4dfc0692d6a2a5e"}, - {file = "google_re2-1.1-1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8b27cc2544b69a357ab2a749dc0c13a1b9055198c56f4c2c3b0f61d693f8e203"}, - {file = "google_re2-1.1-1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3cdf8982b6def987e95b37984d0c1c878de32635dd78acde3273f730b69708c9"}, - {file = "google_re2-1.1-1-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:71ac661a7365e134741fe5542f13d7ce1e6187446b96ddee4c8b7d153fc8f05a"}, - {file = "google_re2-1.1-1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:35a902ba31a71a3e9e114e44473624d9aa9f9b85ec981bfa91671aefe0ef1a6c"}, - {file = "google_re2-1.1-1-cp311-cp311-win32.whl", hash = "sha256:9469f26b485da2784c658e687a766c72e1a17b1e63b3ed24b5f64c3d19fbae3d"}, - {file = "google_re2-1.1-1-cp311-cp311-win_amd64.whl", 
hash = "sha256:07dd0780240ee431781119b46c3bbf76f5cef24a2cbb542f6a08c643e0a68d98"}, - {file = "google_re2-1.1-1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9857dc4d69b8025057c8129e98406a24d51bdaf1b96e481dbba7e69e0ec85104"}, - {file = "google_re2-1.1-1-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:a6eaaa5f200022eb0bdded5949c91454fc96e1edd6f9e9a96dd1dc32c821c00e"}, - {file = "google_re2-1.1-1-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:a32bb2afe128d90b8edc20d4f7d297f7e2753206eba92937a57e5280736eac74"}, - {file = "google_re2-1.1-1-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:4f2754616c61b76ab4e5a4f39892a52a00897203b859c5abd7e3c630dd883cda"}, - {file = "google_re2-1.1-1-cp38-cp38-macosx_13_0_arm64.whl", hash = "sha256:b110f3d657e8f67a43a699d327ce47095b80180ea1118e2de44cb5c7002503d9"}, - {file = "google_re2-1.1-1-cp38-cp38-macosx_13_0_x86_64.whl", hash = "sha256:fd62ba2853eef65e249a9c4437a9ecac568222062bc956f0c61a3d1151a6271b"}, - {file = "google_re2-1.1-1-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:23b50eb74dc3e1d480b04b987c61242df5dade50d08bc16e25eb3582b83fca80"}, - {file = "google_re2-1.1-1-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e1bde89855dd5ab0811187d21eec149975510c80e865c771c883524a452445e7"}, - {file = "google_re2-1.1-1-cp38-cp38-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:10c6cddc720151a509beb98ab310fa0cc8bcb265f83518ebf831de2c9ff73af0"}, - {file = "google_re2-1.1-1-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9bea09c5e8401ec50b8f211bc820ec2f0ca5e744ac67431a1b39bdacbd266553"}, - {file = "google_re2-1.1-1-cp38-cp38-win32.whl", hash = "sha256:ffa51b118037518bcdf63c7649d0b4be7071982b83f48ee3bbabf24a9cb48f8a"}, - {file = "google_re2-1.1-1-cp38-cp38-win_amd64.whl", hash = "sha256:3b47715b6d43c9351957eb5092ad0fa625d04106d81f34cb8a726c53395ad474"}, - {file = "google_re2-1.1-1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:998f31bf7efbc9bb603d0c356c1c77e5331f689c71783df8e21e67bb025fc66a"}, - {file = "google_re2-1.1-1-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:0b5f0eaab859d3ba5f462c82bf37ab56e9d37e19b40b5898c731dbe4213a85f7"}, - {file = "google_re2-1.1-1-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:f6d591d9c4cbc7142b729ddcc3f654d059d8ebc3bc95891198808a4785a6b4d8"}, - {file = "google_re2-1.1-1-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:3c325c2eae197b423330a04ab62e2e1cf942676cd5560907db4d63e23ce0648a"}, - {file = "google_re2-1.1-1-cp39-cp39-macosx_13_0_arm64.whl", hash = "sha256:1e019e8f57955806ee843254ce454249b58800a6e872b2c8e9df2ef3459de0d5"}, - {file = "google_re2-1.1-1-cp39-cp39-macosx_13_0_x86_64.whl", hash = "sha256:58ebbcc7ad2b639768a6bca586357291660ea40dfac83039208e5055c357513b"}, - {file = "google_re2-1.1-1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:723f8553e7fc022294071f14fb7dfc7958c365dc7d4a71d4938ccd2df8c6eca4"}, - {file = "google_re2-1.1-1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d81512b08e6787fc8ef29fea365d3fdbf957553a625550e1d96c36877ae30355"}, - {file = "google_re2-1.1-1-cp39-cp39-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c58601b155651cc572a23ee2860788c77581aad85d3567a55b89b0674702f34d"}, - {file = "google_re2-1.1-1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9c6c9f64b9724ec38da8e514f404ac64e9a6a5e8b1d7031c2dadd05c1f4c16fd"}, - {file = "google_re2-1.1-1-cp39-cp39-win32.whl", hash = 
"sha256:d1b751b9ab9f8e2ab2a36d72b909281ce65f328c9115a1685acae1a2d1afd7a4"}, - {file = "google_re2-1.1-1-cp39-cp39-win_amd64.whl", hash = "sha256:ac775c75cec7069351d201da4e0fb0cae4c1c5ebecd08fa34e1be89740c1d80b"}, -] -google-resumable-media = [ - {file = "google-resumable-media-2.5.0.tar.gz", hash = "sha256:218931e8e2b2a73a58eb354a288e03a0fd5fb1c4583261ac6e4c078666468c93"}, - {file = "google_resumable_media-2.5.0-py2.py3-none-any.whl", hash = "sha256:da1bd943e2e114a56d85d6848497ebf9be6a14d3db23e9fc57581e7c3e8170ec"}, -] -googleapis-common-protos = [ - {file = "googleapis-common-protos-1.60.0.tar.gz", hash = "sha256:e73ebb404098db405ba95d1e1ae0aa91c3e15a71da031a2eeb6b2e23e7bc3708"}, - {file = "googleapis_common_protos-1.60.0-py2.py3-none-any.whl", hash = "sha256:69f9bbcc6acde92cab2db95ce30a70bd2b81d20b12eff3f1aabaffcbe8a93918"}, -] -grapheme = [ - {file = "grapheme-0.6.0.tar.gz", hash = "sha256:44c2b9f21bbe77cfb05835fec230bd435954275267fea1858013b102f8603cca"}, -] -greenlet = [ - {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, - {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, - {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, - {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, - {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, - {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, - {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, - {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, - {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, - {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, - {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, - {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, - {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"}, - {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"}, - {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"}, - {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"}, - {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"}, - {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"}, - {file = 
"greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"}, - {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"}, - {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"}, - {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"}, - {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"}, - {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, - {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, - {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, - {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, - {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, - {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, -] -grpc-google-iam-v1 = [ - {file = "grpc-google-iam-v1-0.12.6.tar.gz", hash = "sha256:2bc4b8fdf22115a65d751c9317329322602c39b7c86a289c9b72d228d960ef5f"}, - {file = "grpc_google_iam_v1-0.12.6-py2.py3-none-any.whl", hash = "sha256:5c10f3d8dc2d88678ab1a9b0cb5482735c5efee71e6c0cd59f872eef22913f5c"}, -] -grpcio = [ - {file = "grpcio-1.57.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:092fa155b945015754bdf988be47793c377b52b88d546e45c6a9f9579ac7f7b6"}, - {file = 
"grpcio-1.57.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:2f7349786da979a94690cc5c2b804cab4e8774a3cf59be40d037c4342c906649"}, - {file = "grpcio-1.57.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:82640e57fb86ea1d71ea9ab54f7e942502cf98a429a200b2e743d8672171734f"}, - {file = "grpcio-1.57.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40b72effd4c789de94ce1be2b5f88d7b9b5f7379fe9645f198854112a6567d9a"}, - {file = "grpcio-1.57.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f708a6a17868ad8bf586598bee69abded4996b18adf26fd2d91191383b79019"}, - {file = "grpcio-1.57.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:60fe15288a0a65d5c1cb5b4a62b1850d07336e3ba728257a810317be14f0c527"}, - {file = "grpcio-1.57.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6907b1cf8bb29b058081d2aad677b15757a44ef2d4d8d9130271d2ad5e33efca"}, - {file = "grpcio-1.57.0-cp310-cp310-win32.whl", hash = "sha256:57b183e8b252825c4dd29114d6c13559be95387aafc10a7be645462a0fc98bbb"}, - {file = "grpcio-1.57.0-cp310-cp310-win_amd64.whl", hash = "sha256:7b400807fa749a9eb286e2cd893e501b110b4d356a218426cb9c825a0474ca56"}, - {file = "grpcio-1.57.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:c6ebecfb7a31385393203eb04ed8b6a08f5002f53df3d59e5e795edb80999652"}, - {file = "grpcio-1.57.0-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:00258cbe3f5188629828363ae8ff78477ce976a6f63fb2bb5e90088396faa82e"}, - {file = "grpcio-1.57.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:23e7d8849a0e58b806253fd206ac105b328171e01b8f18c7d5922274958cc87e"}, - {file = "grpcio-1.57.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5371bcd861e679d63b8274f73ac281751d34bd54eccdbfcd6aa00e692a82cd7b"}, - {file = "grpcio-1.57.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aed90d93b731929e742967e236f842a4a2174dc5db077c8f9ad2c5996f89f63e"}, - {file = "grpcio-1.57.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:fe752639919aad9ffb0dee0d87f29a6467d1ef764f13c4644d212a9a853a078d"}, - {file = "grpcio-1.57.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fada6b07ec4f0befe05218181f4b85176f11d531911b64c715d1875c4736d73a"}, - {file = "grpcio-1.57.0-cp311-cp311-win32.whl", hash = "sha256:bb396952cfa7ad2f01061fbc7dc1ad91dd9d69243bcb8110cf4e36924785a0fe"}, - {file = "grpcio-1.57.0-cp311-cp311-win_amd64.whl", hash = "sha256:e503cb45ed12b924b5b988ba9576dc9949b2f5283b8e33b21dcb6be74a7c58d0"}, - {file = "grpcio-1.57.0-cp37-cp37m-linux_armv7l.whl", hash = "sha256:fd173b4cf02b20f60860dc2ffe30115c18972d7d6d2d69df97ac38dee03be5bf"}, - {file = "grpcio-1.57.0-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:d7f8df114d6b4cf5a916b98389aeaf1e3132035420a88beea4e3d977e5f267a5"}, - {file = "grpcio-1.57.0-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:76c44efa4ede1f42a9d5b2fed1fe9377e73a109bef8675fb0728eb80b0b8e8f2"}, - {file = "grpcio-1.57.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4faea2cfdf762a664ab90589b66f416274887641ae17817de510b8178356bf73"}, - {file = "grpcio-1.57.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c60b83c43faeb6d0a9831f0351d7787a0753f5087cc6fa218d78fdf38e5acef0"}, - {file = "grpcio-1.57.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b363bbb5253e5f9c23d8a0a034dfdf1b7c9e7f12e602fc788c435171e96daccc"}, - {file = "grpcio-1.57.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:f1fb0fd4a1e9b11ac21c30c169d169ef434c6e9344ee0ab27cfa6f605f6387b2"}, - {file = "grpcio-1.57.0-cp37-cp37m-win_amd64.whl", hash = "sha256:34950353539e7d93f61c6796a007c705d663f3be41166358e3d88c45760c7d98"}, - {file = "grpcio-1.57.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:871f9999e0211f9551f368612460442a5436d9444606184652117d6a688c9f51"}, - {file = "grpcio-1.57.0-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:a8a8e560e8dbbdf29288872e91efd22af71e88b0e5736b0daf7773c1fecd99f0"}, - {file = "grpcio-1.57.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:2313b124e475aa9017a9844bdc5eafb2d5abdda9d456af16fc4535408c7d6da6"}, - {file = "grpcio-1.57.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b4098b6b638d9e0ca839a81656a2fd4bc26c9486ea707e8b1437d6f9d61c3941"}, - {file = "grpcio-1.57.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e5b58e32ae14658085c16986d11e99abd002ddbf51c8daae8a0671fffb3467f"}, - {file = "grpcio-1.57.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0f80bf37f09e1caba6a8063e56e2b87fa335add314cf2b78ebf7cb45aa7e3d06"}, - {file = "grpcio-1.57.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5b7a4ce8f862fe32b2a10b57752cf3169f5fe2915acfe7e6a1e155db3da99e79"}, - {file = "grpcio-1.57.0-cp38-cp38-win32.whl", hash = "sha256:9338bacf172e942e62e5889b6364e56657fbf8ac68062e8b25c48843e7b202bb"}, - {file = "grpcio-1.57.0-cp38-cp38-win_amd64.whl", hash = "sha256:e1cb52fa2d67d7f7fab310b600f22ce1ff04d562d46e9e0ac3e3403c2bb4cc16"}, - {file = "grpcio-1.57.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:fee387d2fab144e8a34e0e9c5ca0f45c9376b99de45628265cfa9886b1dbe62b"}, - {file = "grpcio-1.57.0-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:b53333627283e7241fcc217323f225c37783b5f0472316edcaa4479a213abfa6"}, - {file = "grpcio-1.57.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:f19ac6ac0a256cf77d3cc926ef0b4e64a9725cc612f97228cd5dc4bd9dbab03b"}, - {file = "grpcio-1.57.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e3fdf04e402f12e1de8074458549337febb3b45f21076cc02ef4ff786aff687e"}, - {file = "grpcio-1.57.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5613a2fecc82f95d6c51d15b9a72705553aa0d7c932fad7aed7afb51dc982ee5"}, - {file = "grpcio-1.57.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:b670c2faa92124b7397b42303e4d8eb64a4cd0b7a77e35a9e865a55d61c57ef9"}, - {file = "grpcio-1.57.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7a635589201b18510ff988161b7b573f50c6a48fae9cb567657920ca82022b37"}, - {file = "grpcio-1.57.0-cp39-cp39-win32.whl", hash = "sha256:d78d8b86fcdfa1e4c21f8896614b6cc7ee01a2a758ec0c4382d662f2a62cf766"}, - {file = "grpcio-1.57.0-cp39-cp39-win_amd64.whl", hash = "sha256:20ec6fc4ad47d1b6e12deec5045ec3cd5402d9a1597f738263e98f490fe07056"}, - {file = "grpcio-1.57.0.tar.gz", hash = "sha256:4b089f7ad1eb00a104078bab8015b0ed0ebcb3b589e527ab009c53893fd4e613"}, -] -grpcio-status = [ - {file = "grpcio-status-1.57.0.tar.gz", hash = "sha256:b098da99df1eebe58337f8f78e50df990273ccacc1226fddeb47c590e3df9e02"}, - {file = "grpcio_status-1.57.0-py3-none-any.whl", hash = "sha256:15d6af055914ebbc4ed17e55ebfb8e6bb17a45a57fea32e6af19978fb7844690"}, -] -grpcio-tools = [ - {file = "grpcio-tools-1.57.0.tar.gz", hash = "sha256:2f16130d869ce27ecd623194547b649dd657333ec7e8644cc571c645781a9b85"}, - {file = "grpcio_tools-1.57.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:4fb8a8468031f858381a576078924af364a08833d8f8f3237018252c4573a802"}, - {file = 
"grpcio_tools-1.57.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:35bf0dad8a3562043345236c26d0053a856fb06c04d7da652f2ded914e508ae7"}, - {file = "grpcio_tools-1.57.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:ec9aab2fb6783c7fc54bc28f58eb75f1ca77594e6b0fd5e5e7a8114a95169fe0"}, - {file = "grpcio_tools-1.57.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0cf5fc0a1c23f8ea34b408b72fb0e90eec0f404ad4dba98e8f6da3c9ce34e2ed"}, - {file = "grpcio_tools-1.57.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26e69d08a515554e0cfe1ec4d31568836f4b17f0ff82294f957f629388629eb9"}, - {file = "grpcio_tools-1.57.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c39a3656576b6fdaaf28abe0467f7a7231df4230c1bee132322dbc3209419e7f"}, - {file = "grpcio_tools-1.57.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f64f8ab22d27d4a5693310748d35a696061c3b5c7b8c4fb4ab3b4bc1068b6b56"}, - {file = "grpcio_tools-1.57.0-cp310-cp310-win32.whl", hash = "sha256:d2a134756f4db34759a5cc7f7e43f7eb87540b68d1cca62925593c6fb93924f7"}, - {file = "grpcio_tools-1.57.0-cp310-cp310-win_amd64.whl", hash = "sha256:9a3d60fb8d46ede26c1907c146561b3a9caa20a7aff961bc661ef8226f85a2e9"}, - {file = "grpcio_tools-1.57.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:aac98ecad8f7bd4301855669d42a5d97ef7bb34bec2b1e74c7a0641d47e313cf"}, - {file = "grpcio_tools-1.57.0-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:cdd020cb68b51462983b7c2dfbc3eb6ede032b8bf438d4554df0c3f08ce35c76"}, - {file = "grpcio_tools-1.57.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:f54081b08419a39221cd646363b5708857c696b3ad4784f1dcf310891e33a5f7"}, - {file = "grpcio_tools-1.57.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed85a0291fff45b67f2557fe7f117d3bc7af8b54b8619d27bf374b5c8b7e3ca2"}, - {file = "grpcio_tools-1.57.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e868cd6feb3ef07d4b35be104fe1fd0657db05259ff8f8ec5e08f4f89ca1191d"}, - {file = "grpcio_tools-1.57.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:dfb6f6120587b8e228a3cae5ee4985b5bdc18501bad05c49df61965dfc9d70a9"}, - {file = "grpcio_tools-1.57.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4a7ad7f328e28fc97c356d0f10fb10d8b5151bb65aa7cf14bf8084513f0b7306"}, - {file = "grpcio_tools-1.57.0-cp311-cp311-win32.whl", hash = "sha256:9867f2817b1a0c93c523f89ac6c9d8625548af4620a7ce438bf5a76e23327284"}, - {file = "grpcio_tools-1.57.0-cp311-cp311-win_amd64.whl", hash = "sha256:1f9e917a9f18087f6c14b4d4508fb94fca5c2f96852363a89232fb9b2124ac1f"}, - {file = "grpcio_tools-1.57.0-cp37-cp37m-linux_armv7l.whl", hash = "sha256:9f2aefa8a37bd2c4db1a3f1aca11377e2766214520fb70e67071f4ff8d8b0fa5"}, - {file = "grpcio_tools-1.57.0-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:850cbda0ec5d24c39e7215ede410276040692ca45d105fbbeada407fa03f0ac0"}, - {file = "grpcio_tools-1.57.0-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:6fa52972c9647876ea35f6dc2b51002a74ed900ec7894586cbb2fe76f64f99de"}, - {file = "grpcio_tools-1.57.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0eea89d7542719594e50e2283f51a072978b953e8b3e9fd7c59a2c762d4c1"}, - {file = "grpcio_tools-1.57.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3da5240211252fc70a6451fe00c143e2ab2f7bfc2445695ad2ed056b8e48d96"}, - {file = "grpcio_tools-1.57.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = 
"sha256:a0256f8786ac9e4db618a1aa492bb3472569a0946fd3ee862ffe23196323da55"}, - {file = "grpcio_tools-1.57.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c026bdf5c1366ce88b7bbe2d8207374d675afd3fd911f60752103de3da4a41d2"}, - {file = "grpcio_tools-1.57.0-cp37-cp37m-win_amd64.whl", hash = "sha256:9053c2f655589545be08b9d6a673e92970173a4bf11a4b9f18cd6e9af626b587"}, - {file = "grpcio_tools-1.57.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:81ec4dbb696e095057b2528d11a8da04be6bbe2b967fa07d4ea9ba6354338cbf"}, - {file = "grpcio_tools-1.57.0-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:495e2946406963e0b9f063f76d5af0f2a19517dac2b367b5b044432ac9194296"}, - {file = "grpcio_tools-1.57.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:7b46fc6aa8eb7edd18cafcd21fd98703cb6c09e46b507de335fca7f0161dfccb"}, - {file = "grpcio_tools-1.57.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb81ff861692111fa81bd85f64584e624cb4013bd66fbce8a209b8893f5ce398"}, - {file = "grpcio_tools-1.57.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a42dc220eb5305f470855c9284f4c8e85ae59d6d742cd07946b0cbe5e9ca186"}, - {file = "grpcio_tools-1.57.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:90d10d9038ba46a595a223a34f136c9230e3d6d7abc2433dbf0e1c31939d3a8b"}, - {file = "grpcio_tools-1.57.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5bc3e6d338aefb052e19cedabe00452be46d0c10a4ed29ee77abb00402e438fe"}, - {file = "grpcio_tools-1.57.0-cp38-cp38-win32.whl", hash = "sha256:34b36217b17b5bea674a414229913e1fd80ede328be51e1b531fcc62abd393b0"}, - {file = "grpcio_tools-1.57.0-cp38-cp38-win_amd64.whl", hash = "sha256:dbde4004a0688400036342ff73e3706e8940483e2871547b1354d59e93a38277"}, - {file = "grpcio_tools-1.57.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:784574709b9690dc28696617ea69352e2132352fdfc9bc89afa8e39f99ae538e"}, - {file = "grpcio_tools-1.57.0-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:85ac4e62eb44428cde025fd9ab7554002315fc7880f791c553fc5a0015cc9931"}, - {file = "grpcio_tools-1.57.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:dc771d4db5701f280957bbcee91745e0686d00ed1c6aa7e05ba30a58b02d70a1"}, - {file = "grpcio_tools-1.57.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3ac06703c412f8167a9062eaf6099409967e33bf98fa5b02be4b4689b6bdf39"}, - {file = "grpcio_tools-1.57.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02d78c034109f46032c7217260066d49d41e6bcaf588fa28fa40fe2f83445347"}, - {file = "grpcio_tools-1.57.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2db25f15ed44327f2e02d0c4fe741ac966f9500e407047d8a7c7fccf2df65616"}, - {file = "grpcio_tools-1.57.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2b417c97936d94874a3ce7ed8deab910f2233e3612134507cfee4af8735c38a6"}, - {file = "grpcio_tools-1.57.0-cp39-cp39-win32.whl", hash = "sha256:f717cce5093e6b6049d9ea6d12fdf3658efdb1a80772f7737db1f8510b876df6"}, - {file = "grpcio_tools-1.57.0-cp39-cp39-win_amd64.whl", hash = "sha256:1c0e8a1a32973a5d59fbcc19232f925e5c48116e9411f788033a31c5ca5130b4"}, -] -gunicorn = [ - {file = "gunicorn-21.2.0-py3-none-any.whl", hash = "sha256:3213aa5e8c24949e792bcacfc176fef362e7aac80b76c56f6b5122bf350722f0"}, - {file = "gunicorn-21.2.0.tar.gz", hash = "sha256:88ec8bff1d634f98e61b9f65bc4bf3cd918a90806c6f5c48bc5603849ec81033"}, -] -h11 = [ - {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, - {file = "h11-0.14.0.tar.gz", hash = 
"sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, -] -h2 = [ - {file = "h2-4.1.0-py3-none-any.whl", hash = "sha256:03a46bcf682256c95b5fd9e9a99c1323584c3eec6440d379b9903d709476bc6d"}, - {file = "h2-4.1.0.tar.gz", hash = "sha256:a83aca08fbe7aacb79fec788c9c0bac936343560ed9ec18b82a13a12c28d2abb"}, -] -hexbytes = [ - {file = "hexbytes-0.3.1-py3-none-any.whl", hash = "sha256:383595ad75026cf00abd570f44b368c6cdac0c6becfae5c39ff88829877f8a59"}, - {file = "hexbytes-0.3.1.tar.gz", hash = "sha256:a3fe35c6831ee8fafd048c4c086b986075fc14fd46258fa24ecb8d65745f9a9d"}, -] -hpack = [ - {file = "hpack-4.0.0-py3-none-any.whl", hash = "sha256:84a076fad3dc9a9f8063ccb8041ef100867b1878b25ef0ee63847a5d53818a6c"}, - {file = "hpack-4.0.0.tar.gz", hash = "sha256:fc41de0c63e687ebffde81187a948221294896f6bdc0ae2312708df339430095"}, -] -httpcore = [ - {file = "httpcore-0.17.3-py3-none-any.whl", hash = "sha256:c2789b767ddddfa2a5782e3199b2b7f6894540b17b16ec26b2c4d8e103510b87"}, - {file = "httpcore-0.17.3.tar.gz", hash = "sha256:a6f30213335e34c1ade7be6ec7c47f19f50c56db36abef1a9dfa3815b1cb3888"}, -] -httplib2 = [ - {file = "httplib2-0.22.0-py3-none-any.whl", hash = "sha256:14ae0a53c1ba8f3d37e9e27cf37eabb0fb9980f435ba405d546948b009dd64dc"}, - {file = "httplib2-0.22.0.tar.gz", hash = "sha256:d7a10bc5ef5ab08322488bde8c726eeee5c8618723fdb399597ec58f3d82df81"}, -] -httpx = [ - {file = "httpx-0.24.1-py3-none-any.whl", hash = "sha256:06781eb9ac53cde990577af654bd990a4949de37a28bdb4a230d434f3a30b9bd"}, - {file = "httpx-0.24.1.tar.gz", hash = "sha256:5853a43053df830c20f8110c5e69fe44d035d850b2dfe795e196f00fdb774bdd"}, -] -humanfriendly = [ - {file = "humanfriendly-10.0-py2.py3-none-any.whl", hash = "sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477"}, - {file = "humanfriendly-10.0.tar.gz", hash = "sha256:6b0b831ce8f15f7300721aa49829fc4e83921a9a301cc7f606be6686a2288ddc"}, -] -humanize = [ - {file = "humanize-4.8.0-py3-none-any.whl", hash = "sha256:8bc9e2bb9315e61ec06bf690151ae35aeb65651ab091266941edf97c90836404"}, - {file = "humanize-4.8.0.tar.gz", hash = "sha256:9783373bf1eec713a770ecaa7c2d7a7902c98398009dfa3d8a2df91eec9311e8"}, -] -hyperframe = [ - {file = "hyperframe-6.0.1-py3-none-any.whl", hash = "sha256:0ec6bafd80d8ad2195c4f03aacba3a8265e57bc4cff261e802bf39970ed02a15"}, - {file = "hyperframe-6.0.1.tar.gz", hash = "sha256:ae510046231dc8e9ecb1a6586f63d2347bf4c8905914aa84ba585ae85f28a914"}, -] -idna = [ - {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, - {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, -] -importlib-metadata = [ - {file = "importlib_metadata-6.11.0-py3-none-any.whl", hash = "sha256:f0afba6205ad8f8947c7d338b5342d5db2afbfd82f9cbef7879a9539cc12eb9b"}, - {file = "importlib_metadata-6.11.0.tar.gz", hash = "sha256:1231cf92d825c9e03cfc4da076a16de6422c863558229ea0b22b675657463443"}, -] -importlib-resources = [ - {file = "importlib_resources-6.0.1-py3-none-any.whl", hash = "sha256:134832a506243891221b88b4ae1213327eea96ceb4e407a00d790bb0626f45cf"}, - {file = "importlib_resources-6.0.1.tar.gz", hash = "sha256:4359457e42708462b9626a04657c6208ad799ceb41e5c58c57ffa0e6a098a5d4"}, -] -inflection = [ - {file = "inflection-0.5.1-py2.py3-none-any.whl", hash = "sha256:f38b2b640938a4f35ade69ac3d053042959b62a0f1076a5bbaa1b9526605a8a2"}, - {file = "inflection-0.5.1.tar.gz", hash = "sha256:1a29730d366e996aaacffb2f1f1cb9593dc38e2ddd30c91250c6dde09ea9b417"}, -] 
-iniconfig = [ - {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, - {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, -] -isodate = [ - {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, - {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, -] -isort = [ - {file = "isort-5.12.0-py3-none-any.whl", hash = "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6"}, - {file = "isort-5.12.0.tar.gz", hash = "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504"}, -] -itsdangerous = [ - {file = "itsdangerous-2.1.2-py3-none-any.whl", hash = "sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44"}, - {file = "itsdangerous-2.1.2.tar.gz", hash = "sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a"}, -] -jaraco-classes = [ - {file = "jaraco.classes-3.3.0-py3-none-any.whl", hash = "sha256:10afa92b6743f25c0cf5f37c6bb6e18e2c5bb84a16527ccfc0040ea377e7aaeb"}, - {file = "jaraco.classes-3.3.0.tar.gz", hash = "sha256:c063dd08e89217cee02c8d5e5ec560f2c8ce6cdc2fcdc2e68f7b2e5547ed3621"}, -] -jeepney = [ - {file = "jeepney-0.8.0-py3-none-any.whl", hash = "sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755"}, - {file = "jeepney-0.8.0.tar.gz", hash = "sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806"}, -] -jinja2 = [ - {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, - {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, -] -jinxed = [ - {file = "jinxed-1.2.0-py2.py3-none-any.whl", hash = "sha256:cfc2b2e4e3b4326954d546ba6d6b9a7a796ddcb0aef8d03161d005177eb0d48b"}, - {file = "jinxed-1.2.0.tar.gz", hash = "sha256:032acda92d5c57cd216033cbbd53de731e6ed50deb63eb4781336ca55f72cda5"}, -] -jmespath = [ - {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, - {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, -] -jsonpath-ng = [ - {file = "jsonpath-ng-1.5.3.tar.gz", hash = "sha256:a273b182a82c1256daab86a313b937059261b5c5f8c4fa3fc38b882b344dd567"}, - {file = "jsonpath_ng-1.5.3-py2-none-any.whl", hash = "sha256:f75b95dbecb8a0f3b86fd2ead21c2b022c3f5770957492b9b6196ecccfeb10aa"}, - {file = "jsonpath_ng-1.5.3-py3-none-any.whl", hash = "sha256:292a93569d74029ba75ac2dc3d3630fc0e17b2df26119a165fa1d498ca47bf65"}, -] -jsonschema = [ - {file = "jsonschema-4.19.0-py3-none-any.whl", hash = "sha256:043dc26a3845ff09d20e4420d6012a9c91c9aa8999fa184e7efcfeccb41e32cb"}, - {file = "jsonschema-4.19.0.tar.gz", hash = "sha256:6e1e7569ac13be8139b2dd2c21a55d350066ee3f80df06c608b398cdc6f30e8f"}, -] -jsonschema-specifications = [ - {file = "jsonschema_specifications-2023.7.1-py3-none-any.whl", hash = "sha256:05adf340b659828a004220a9613be00fa3f223f2b82002e273dee62fd50524b1"}, - {file = "jsonschema_specifications-2023.7.1.tar.gz", hash = "sha256:c91a50404e88a1f6ba40636778e2ee08f6e24c5613fe4c53ac24578a5a7f72bb"}, -] -keyring = [ - {file = "keyring-24.2.0-py3-none-any.whl", hash = "sha256:4901caaf597bfd3bbd78c9a0c7c4c29fcd8310dab2cffefe749e916b6527acd6"}, - {file = "keyring-24.2.0.tar.gz", 
hash = "sha256:ca0746a19ec421219f4d713f848fa297a661a8a8c1504867e55bfb5e09091509"}, -] -lazy-object-proxy = [ - {file = "lazy-object-proxy-1.9.0.tar.gz", hash = "sha256:659fb5809fa4629b8a1ac5106f669cfc7bef26fbb389dda53b3e010d1ac4ebae"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b40387277b0ed2d0602b8293b94d7257e17d1479e257b4de114ea11a8cb7f2d7"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8c6cfb338b133fbdbc5cfaa10fe3c6aeea827db80c978dbd13bc9dd8526b7d4"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:721532711daa7db0d8b779b0bb0318fa87af1c10d7fe5e52ef30f8eff254d0cd"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:66a3de4a3ec06cd8af3f61b8e1ec67614fbb7c995d02fa224813cb7afefee701"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1aa3de4088c89a1b69f8ec0dcc169aa725b0ff017899ac568fe44ddc1396df46"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-win32.whl", hash = "sha256:f0705c376533ed2a9e5e97aacdbfe04cecd71e0aa84c7c0595d02ef93b6e4455"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:ea806fd4c37bf7e7ad82537b0757999264d5f70c45468447bb2b91afdbe73a6e"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:946d27deaff6cf8452ed0dba83ba38839a87f4f7a9732e8f9fd4107b21e6ff07"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79a31b086e7e68b24b99b23d57723ef7e2c6d81ed21007b6281ebcd1688acb0a"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f699ac1c768270c9e384e4cbd268d6e67aebcfae6cd623b4d7c3bfde5a35db59"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bfb38f9ffb53b942f2b5954e0f610f1e721ccebe9cce9025a38c8ccf4a5183a4"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:189bbd5d41ae7a498397287c408617fe5c48633e7755287b21d741f7db2706a9"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-win32.whl", hash = "sha256:81fc4d08b062b535d95c9ea70dbe8a335c45c04029878e62d744bdced5141586"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:f2457189d8257dd41ae9b434ba33298aec198e30adf2dcdaaa3a28b9994f6adb"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d9e25ef10a39e8afe59a5c348a4dbf29b4868ab76269f81ce1674494e2565a6e"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cbf9b082426036e19c6924a9ce90c740a9861e2bdc27a4834fd0a910742ac1e8"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f5fa4a61ce2438267163891961cfd5e32ec97a2c444e5b842d574251ade27d2"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:8fa02eaab317b1e9e03f69aab1f91e120e7899b392c4fc19807a8278a07a97e8"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e7c21c95cae3c05c14aafffe2865bbd5e377cfc1348c4f7751d9dc9a48ca4bda"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-win32.whl", hash = "sha256:f12ad7126ae0c98d601a7ee504c1122bcef553d1d5e0c3bfa77b16b3968d2734"}, 
- {file = "lazy_object_proxy-1.9.0-cp37-cp37m-win_amd64.whl", hash = "sha256:edd20c5a55acb67c7ed471fa2b5fb66cb17f61430b7a6b9c3b4a1e40293b1671"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2d0daa332786cf3bb49e10dc6a17a52f6a8f9601b4cf5c295a4f85854d61de63"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cd077f3d04a58e83d04b20e334f678c2b0ff9879b9375ed107d5d07ff160171"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:660c94ea760b3ce47d1855a30984c78327500493d396eac4dfd8bd82041b22be"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:212774e4dfa851e74d393a2370871e174d7ff0ebc980907723bb67d25c8a7c30"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f0117049dd1d5635bbff65444496c90e0baa48ea405125c088e93d9cf4525b11"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-win32.whl", hash = "sha256:0a891e4e41b54fd5b8313b96399f8b0e173bbbfc03c7631f01efbe29bb0bcf82"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:9990d8e71b9f6488e91ad25f322898c136b008d87bf852ff65391b004da5e17b"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9e7551208b2aded9c1447453ee366f1c4070602b3d932ace044715d89666899b"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f83ac4d83ef0ab017683d715ed356e30dd48a93746309c8f3517e1287523ef4"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7322c3d6f1766d4ef1e51a465f47955f1e8123caee67dd641e67d539a534d006"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:18b78ec83edbbeb69efdc0e9c1cb41a3b1b1ed11ddd8ded602464c3fc6020494"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:09763491ce220c0299688940f8dc2c5d05fd1f45af1e42e636b2e8b2303e4382"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-win32.whl", hash = "sha256:9090d8e53235aa280fc9239a86ae3ea8ac58eff66a705fa6aa2ec4968b95c821"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:db1c1722726f47e10e0b5fdbf15ac3b8adb58c091d12b3ab713965795036985f"}, -] -leather = [ - {file = "leather-0.3.4-py2.py3-none-any.whl", hash = "sha256:5e741daee96e9f1e9e06081b8c8a10c4ac199301a0564cdd99b09df15b4603d2"}, - {file = "leather-0.3.4.tar.gz", hash = "sha256:b43e21c8fa46b2679de8449f4d953c06418666dc058ce41055ee8a8d3bb40918"}, -] -limits = [ - {file = "limits-3.6.0-py3-none-any.whl", hash = "sha256:32fe29a398352c71bc43d53773117d47e22c5ea4200aef28d3f5fdee10334cd7"}, - {file = "limits-3.6.0.tar.gz", hash = "sha256:57a9c69fd37ad1e4fa3886dff8d035227e1f6af87f47e9118627e72cf1ced3bf"}, -] -linkify-it-py = [ - {file = "linkify-it-py-2.0.2.tar.gz", hash = "sha256:19f3060727842c254c808e99d465c80c49d2c7306788140987a1a7a29b0d6ad2"}, - {file = "linkify_it_py-2.0.2-py3-none-any.whl", hash = "sha256:a3a24428f6c96f27370d7fe61d2ac0be09017be5190d68d8658233171f1b6541"}, -] -lockfile = [ - {file = "lockfile-0.12.2-py2.py3-none-any.whl", hash = "sha256:6c3cb24f344923d30b2785d5ad75182c8ea7ac1b6171b08657258ec7429d50fa"}, - {file = "lockfile-0.12.2.tar.gz", hash = "sha256:6aed02de03cba24efabcd600b30540140634fc06cfa603822d508d5361e9f799"}, -] -logbook = [ - {file = 
"Logbook-1.5.3-cp27-cp27m-win32.whl", hash = "sha256:56ee54c11df3377314cedcd6507638f015b4b88c0238c2e01b5eb44fd3a6ad1b"}, - {file = "Logbook-1.5.3-cp27-cp27m-win_amd64.whl", hash = "sha256:2dc85f1510533fddb481e97677bb7bca913560862734c0b3b289bfed04f78c92"}, - {file = "Logbook-1.5.3-cp35-cp35m-win32.whl", hash = "sha256:94e2e11ff3c2304b0d09a36c6208e5ae756eb948b210e5cbd63cd8d27f911542"}, - {file = "Logbook-1.5.3-cp35-cp35m-win_amd64.whl", hash = "sha256:97fee1bd9605f76335b169430ed65e15e457a844b2121bd1d90a08cf7e30aba0"}, - {file = "Logbook-1.5.3-cp36-cp36m-win32.whl", hash = "sha256:7c533eb728b3d220b1b5414ba4635292d149d79f74f6973b4aa744c850ca944a"}, - {file = "Logbook-1.5.3-cp36-cp36m-win_amd64.whl", hash = "sha256:e18f7422214b1cf0240c56f884fd9c9b4ff9d0da2eabca9abccba56df7222f66"}, - {file = "Logbook-1.5.3-cp37-cp37m-win32.whl", hash = "sha256:8f76a2e7b1f72595f753228732f81ce342caf03babc3fed6bbdcf366f2f20f18"}, - {file = "Logbook-1.5.3-cp37-cp37m-win_amd64.whl", hash = "sha256:0cf2cdbfb65a03b5987d19109dacad13417809dcf697f66e1a7084fb21744ea9"}, - {file = "Logbook-1.5.3.tar.gz", hash = "sha256:66f454ada0f56eae43066f604a222b09893f98c1adc18df169710761b8f32fe8"}, -] -lxml = [ - {file = "lxml-4.9.3-cp27-cp27m-macosx_11_0_x86_64.whl", hash = "sha256:b0a545b46b526d418eb91754565ba5b63b1c0b12f9bd2f808c852d9b4b2f9b5c"}, - {file = "lxml-4.9.3-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:075b731ddd9e7f68ad24c635374211376aa05a281673ede86cbe1d1b3455279d"}, - {file = "lxml-4.9.3-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1e224d5755dba2f4a9498e150c43792392ac9b5380aa1b845f98a1618c94eeef"}, - {file = "lxml-4.9.3-cp27-cp27m-win32.whl", hash = "sha256:2c74524e179f2ad6d2a4f7caf70e2d96639c0954c943ad601a9e146c76408ed7"}, - {file = "lxml-4.9.3-cp27-cp27m-win_amd64.whl", hash = "sha256:4f1026bc732b6a7f96369f7bfe1a4f2290fb34dce00d8644bc3036fb351a4ca1"}, - {file = "lxml-4.9.3-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c0781a98ff5e6586926293e59480b64ddd46282953203c76ae15dbbbf302e8bb"}, - {file = "lxml-4.9.3-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:cef2502e7e8a96fe5ad686d60b49e1ab03e438bd9123987994528febd569868e"}, - {file = "lxml-4.9.3-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:b86164d2cff4d3aaa1f04a14685cbc072efd0b4f99ca5708b2ad1b9b5988a991"}, - {file = "lxml-4.9.3-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:42871176e7896d5d45138f6d28751053c711ed4d48d8e30b498da155af39aebd"}, - {file = "lxml-4.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:ae8b9c6deb1e634ba4f1930eb67ef6e6bf6a44b6eb5ad605642b2d6d5ed9ce3c"}, - {file = "lxml-4.9.3-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:411007c0d88188d9f621b11d252cce90c4a2d1a49db6c068e3c16422f306eab8"}, - {file = "lxml-4.9.3-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:cd47b4a0d41d2afa3e58e5bf1f62069255aa2fd6ff5ee41604418ca925911d76"}, - {file = "lxml-4.9.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0e2cb47860da1f7e9a5256254b74ae331687b9672dfa780eed355c4c9c3dbd23"}, - {file = "lxml-4.9.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1247694b26342a7bf47c02e513d32225ededd18045264d40758abeb3c838a51f"}, - {file = "lxml-4.9.3-cp310-cp310-win32.whl", hash = "sha256:cdb650fc86227eba20de1a29d4b2c1bfe139dc75a0669270033cb2ea3d391b85"}, - {file = "lxml-4.9.3-cp310-cp310-win_amd64.whl", hash = 
"sha256:97047f0d25cd4bcae81f9ec9dc290ca3e15927c192df17331b53bebe0e3ff96d"}, - {file = "lxml-4.9.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:1f447ea5429b54f9582d4b955f5f1985f278ce5cf169f72eea8afd9502973dd5"}, - {file = "lxml-4.9.3-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:57d6ba0ca2b0c462f339640d22882acc711de224d769edf29962b09f77129cbf"}, - {file = "lxml-4.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:9767e79108424fb6c3edf8f81e6730666a50feb01a328f4a016464a5893f835a"}, - {file = "lxml-4.9.3-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:71c52db65e4b56b8ddc5bb89fb2e66c558ed9d1a74a45ceb7dcb20c191c3df2f"}, - {file = "lxml-4.9.3-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d73d8ecf8ecf10a3bd007f2192725a34bd62898e8da27eb9d32a58084f93962b"}, - {file = "lxml-4.9.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0a3d3487f07c1d7f150894c238299934a2a074ef590b583103a45002035be120"}, - {file = "lxml-4.9.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e28c51fa0ce5674be9f560c6761c1b441631901993f76700b1b30ca6c8378d6"}, - {file = "lxml-4.9.3-cp311-cp311-win32.whl", hash = "sha256:0bfd0767c5c1de2551a120673b72e5d4b628737cb05414f03c3277bf9bed3305"}, - {file = "lxml-4.9.3-cp311-cp311-win_amd64.whl", hash = "sha256:25f32acefac14ef7bd53e4218fe93b804ef6f6b92ffdb4322bb6d49d94cad2bc"}, - {file = "lxml-4.9.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:d3ff32724f98fbbbfa9f49d82852b159e9784d6094983d9a8b7f2ddaebb063d4"}, - {file = "lxml-4.9.3-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:48d6ed886b343d11493129e019da91d4039826794a3e3027321c56d9e71505be"}, - {file = "lxml-4.9.3-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:9a92d3faef50658dd2c5470af249985782bf754c4e18e15afb67d3ab06233f13"}, - {file = "lxml-4.9.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b4e4bc18382088514ebde9328da057775055940a1f2e18f6ad2d78aa0f3ec5b9"}, - {file = "lxml-4.9.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fc9b106a1bf918db68619fdcd6d5ad4f972fdd19c01d19bdb6bf63f3589a9ec5"}, - {file = "lxml-4.9.3-cp312-cp312-win_amd64.whl", hash = "sha256:d37017287a7adb6ab77e1c5bee9bcf9660f90ff445042b790402a654d2ad81d8"}, - {file = "lxml-4.9.3-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:56dc1f1ebccc656d1b3ed288f11e27172a01503fc016bcabdcbc0978b19352b7"}, - {file = "lxml-4.9.3-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:578695735c5a3f51569810dfebd05dd6f888147a34f0f98d4bb27e92b76e05c2"}, - {file = "lxml-4.9.3-cp35-cp35m-win32.whl", hash = "sha256:704f61ba8c1283c71b16135caf697557f5ecf3e74d9e453233e4771d68a1f42d"}, - {file = "lxml-4.9.3-cp35-cp35m-win_amd64.whl", hash = "sha256:c41bfca0bd3532d53d16fd34d20806d5c2b1ace22a2f2e4c0008570bf2c58833"}, - {file = "lxml-4.9.3-cp36-cp36m-macosx_11_0_x86_64.whl", hash = "sha256:64f479d719dc9f4c813ad9bb6b28f8390360660b73b2e4beb4cb0ae7104f1c12"}, - {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:dd708cf4ee4408cf46a48b108fb9427bfa00b9b85812a9262b5c668af2533ea5"}, - {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c31c7462abdf8f2ac0577d9f05279727e698f97ecbb02f17939ea99ae8daa98"}, - {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e3cd95e10c2610c360154afdc2f1480aea394f4a4f1ea0a5eacce49640c9b190"}, - {file 
= "lxml-4.9.3-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:4930be26af26ac545c3dffb662521d4e6268352866956672231887d18f0eaab2"}, - {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4aec80cde9197340bc353d2768e2a75f5f60bacda2bab72ab1dc499589b3878c"}, - {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:14e019fd83b831b2e61baed40cab76222139926b1fb5ed0e79225bc0cae14584"}, - {file = "lxml-4.9.3-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0c0850c8b02c298d3c7006b23e98249515ac57430e16a166873fc47a5d549287"}, - {file = "lxml-4.9.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:aca086dc5f9ef98c512bac8efea4483eb84abbf926eaeedf7b91479feb092458"}, - {file = "lxml-4.9.3-cp36-cp36m-win32.whl", hash = "sha256:50baa9c1c47efcaef189f31e3d00d697c6d4afda5c3cde0302d063492ff9b477"}, - {file = "lxml-4.9.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bef4e656f7d98aaa3486d2627e7d2df1157d7e88e7efd43a65aa5dd4714916cf"}, - {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:46f409a2d60f634fe550f7133ed30ad5321ae2e6630f13657fb9479506b00601"}, - {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:4c28a9144688aef80d6ea666c809b4b0e50010a2aca784c97f5e6bf143d9f129"}, - {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:141f1d1a9b663c679dc524af3ea1773e618907e96075262726c7612c02b149a4"}, - {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:53ace1c1fd5a74ef662f844a0413446c0629d151055340e9893da958a374f70d"}, - {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:17a753023436a18e27dd7769e798ce302963c236bc4114ceee5b25c18c52c693"}, - {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7d298a1bd60c067ea75d9f684f5f3992c9d6766fadbc0bcedd39750bf344c2f4"}, - {file = "lxml-4.9.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:081d32421db5df44c41b7f08a334a090a545c54ba977e47fd7cc2deece78809a"}, - {file = "lxml-4.9.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:23eed6d7b1a3336ad92d8e39d4bfe09073c31bfe502f20ca5116b2a334f8ec02"}, - {file = "lxml-4.9.3-cp37-cp37m-win32.whl", hash = "sha256:1509dd12b773c02acd154582088820893109f6ca27ef7291b003d0e81666109f"}, - {file = "lxml-4.9.3-cp37-cp37m-win_amd64.whl", hash = "sha256:120fa9349a24c7043854c53cae8cec227e1f79195a7493e09e0c12e29f918e52"}, - {file = "lxml-4.9.3-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4d2d1edbca80b510443f51afd8496be95529db04a509bc8faee49c7b0fb6d2cc"}, - {file = "lxml-4.9.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8d7e43bd40f65f7d97ad8ef5c9b1778943d02f04febef12def25f7583d19baac"}, - {file = "lxml-4.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:71d66ee82e7417828af6ecd7db817913cb0cf9d4e61aa0ac1fde0583d84358db"}, - {file = "lxml-4.9.3-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:6fc3c450eaa0b56f815c7b62f2b7fba7266c4779adcf1cece9e6deb1de7305ce"}, - {file = "lxml-4.9.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:65299ea57d82fb91c7f019300d24050c4ddeb7c5a190e076b5f48a2b43d19c42"}, - {file = "lxml-4.9.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = 
"sha256:eadfbbbfb41b44034a4c757fd5d70baccd43296fb894dba0295606a7cf3124aa"}, - {file = "lxml-4.9.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3e9bdd30efde2b9ccfa9cb5768ba04fe71b018a25ea093379c857c9dad262c40"}, - {file = "lxml-4.9.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fcdd00edfd0a3001e0181eab3e63bd5c74ad3e67152c84f93f13769a40e073a7"}, - {file = "lxml-4.9.3-cp38-cp38-win32.whl", hash = "sha256:57aba1bbdf450b726d58b2aea5fe47c7875f5afb2c4a23784ed78f19a0462574"}, - {file = "lxml-4.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:92af161ecbdb2883c4593d5ed4815ea71b31fafd7fd05789b23100d081ecac96"}, - {file = "lxml-4.9.3-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:9bb6ad405121241e99a86efff22d3ef469024ce22875a7ae045896ad23ba2340"}, - {file = "lxml-4.9.3-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8ed74706b26ad100433da4b9d807eae371efaa266ffc3e9191ea436087a9d6a7"}, - {file = "lxml-4.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fbf521479bcac1e25a663df882c46a641a9bff6b56dc8b0fafaebd2f66fb231b"}, - {file = "lxml-4.9.3-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:303bf1edce6ced16bf67a18a1cf8339d0db79577eec5d9a6d4a80f0fb10aa2da"}, - {file = "lxml-4.9.3-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:5515edd2a6d1a5a70bfcdee23b42ec33425e405c5b351478ab7dc9347228f96e"}, - {file = "lxml-4.9.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:690dafd0b187ed38583a648076865d8c229661ed20e48f2335d68e2cf7dc829d"}, - {file = "lxml-4.9.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:b6420a005548ad52154c8ceab4a1290ff78d757f9e5cbc68f8c77089acd3c432"}, - {file = "lxml-4.9.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bb3bb49c7a6ad9d981d734ef7c7193bc349ac338776a0360cc671eaee89bcf69"}, - {file = "lxml-4.9.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d27be7405547d1f958b60837dc4c1007da90b8b23f54ba1f8b728c78fdb19d50"}, - {file = "lxml-4.9.3-cp39-cp39-win32.whl", hash = "sha256:8df133a2ea5e74eef5e8fc6f19b9e085f758768a16e9877a60aec455ed2609b2"}, - {file = "lxml-4.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:4dd9a263e845a72eacb60d12401e37c616438ea2e5442885f65082c276dfb2b2"}, - {file = "lxml-4.9.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6689a3d7fd13dc687e9102a27e98ef33730ac4fe37795d5036d18b4d527abd35"}, - {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:f6bdac493b949141b733c5345b6ba8f87a226029cbabc7e9e121a413e49441e0"}, - {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:05186a0f1346ae12553d66df1cfce6f251589fea3ad3da4f3ef4e34b2d58c6a3"}, - {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c2006f5c8d28dee289f7020f721354362fa304acbaaf9745751ac4006650254b"}, - {file = "lxml-4.9.3-pp38-pypy38_pp73-macosx_11_0_x86_64.whl", hash = "sha256:5c245b783db29c4e4fbbbfc9c5a78be496c9fea25517f90606aa1f6b2b3d5f7b"}, - {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4fb960a632a49f2f089d522f70496640fdf1218f1243889da3822e0a9f5f3ba7"}, - {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:50670615eaf97227d5dc60de2dc99fb134a7130d310d783314e7724bf163f75d"}, - {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = 
"sha256:9719fe17307a9e814580af1f5c6e05ca593b12fb7e44fe62450a5384dbf61b4b"}, - {file = "lxml-4.9.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:3331bece23c9ee066e0fb3f96c61322b9e0f54d775fccefff4c38ca488de283a"}, - {file = "lxml-4.9.3-pp39-pypy39_pp73-macosx_11_0_x86_64.whl", hash = "sha256:ed667f49b11360951e201453fc3967344d0d0263aa415e1619e85ae7fd17b4e0"}, - {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8b77946fd508cbf0fccd8e400a7f71d4ac0e1595812e66025bac475a8e811694"}, - {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e4da8ca0c0c0aea88fd46be8e44bd49716772358d648cce45fe387f7b92374a7"}, - {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fe4bda6bd4340caa6e5cf95e73f8fea5c4bfc55763dd42f1b50a94c1b4a2fbd4"}, - {file = "lxml-4.9.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:f3df3db1d336b9356dd3112eae5f5c2b8b377f3bc826848567f10bfddfee77e9"}, - {file = "lxml-4.9.3.tar.gz", hash = "sha256:48628bd53a426c9eb9bc066a923acaa0878d1e86129fd5359aee99285f4eed9c"}, -] -lz4 = [ - {file = "lz4-4.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b891880c187e96339474af2a3b2bfb11a8e4732ff5034be919aa9029484cd201"}, - {file = "lz4-4.3.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:222a7e35137d7539c9c33bb53fcbb26510c5748779364014235afc62b0ec797f"}, - {file = "lz4-4.3.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f76176492ff082657ada0d0f10c794b6da5800249ef1692b35cf49b1e93e8ef7"}, - {file = "lz4-4.3.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1d18718f9d78182c6b60f568c9a9cec8a7204d7cb6fad4e511a2ef279e4cb05"}, - {file = "lz4-4.3.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6cdc60e21ec70266947a48839b437d46025076eb4b12c76bd47f8e5eb8a75dcc"}, - {file = "lz4-4.3.3-cp310-cp310-win32.whl", hash = "sha256:c81703b12475da73a5d66618856d04b1307e43428a7e59d98cfe5a5d608a74c6"}, - {file = "lz4-4.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:43cf03059c0f941b772c8aeb42a0813d68d7081c009542301637e5782f8a33e2"}, - {file = "lz4-4.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:30e8c20b8857adef7be045c65f47ab1e2c4fabba86a9fa9a997d7674a31ea6b6"}, - {file = "lz4-4.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2f7b1839f795315e480fb87d9bc60b186a98e3e5d17203c6e757611ef7dcef61"}, - {file = "lz4-4.3.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edfd858985c23523f4e5a7526ca6ee65ff930207a7ec8a8f57a01eae506aaee7"}, - {file = "lz4-4.3.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e9c410b11a31dbdc94c05ac3c480cb4b222460faf9231f12538d0074e56c563"}, - {file = "lz4-4.3.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d2507ee9c99dbddd191c86f0e0c8b724c76d26b0602db9ea23232304382e1f21"}, - {file = "lz4-4.3.3-cp311-cp311-win32.whl", hash = "sha256:f180904f33bdd1e92967923a43c22899e303906d19b2cf8bb547db6653ea6e7d"}, - {file = "lz4-4.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:b14d948e6dce389f9a7afc666d60dd1e35fa2138a8ec5306d30cd2e30d36b40c"}, - {file = "lz4-4.3.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e36cd7b9d4d920d3bfc2369840da506fa68258f7bb176b8743189793c055e43d"}, - {file = "lz4-4.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:31ea4be9d0059c00b2572d700bf2c1bc82f241f2c3282034a759c9a4d6ca4dc2"}, - {file = "lz4-4.3.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33c9a6fd20767ccaf70649982f8f3eeb0884035c150c0b818ea660152cf3c809"}, - {file = "lz4-4.3.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bca8fccc15e3add173da91be8f34121578dc777711ffd98d399be35487c934bf"}, - {file = "lz4-4.3.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7d84b479ddf39fe3ea05387f10b779155fc0990125f4fb35d636114e1c63a2e"}, - {file = "lz4-4.3.3-cp312-cp312-win32.whl", hash = "sha256:337cb94488a1b060ef1685187d6ad4ba8bc61d26d631d7ba909ee984ea736be1"}, - {file = "lz4-4.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:5d35533bf2cee56f38ced91f766cd0038b6abf46f438a80d50c52750088be93f"}, - {file = "lz4-4.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:363ab65bf31338eb364062a15f302fc0fab0a49426051429866d71c793c23394"}, - {file = "lz4-4.3.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0a136e44a16fc98b1abc404fbabf7f1fada2bdab6a7e970974fb81cf55b636d0"}, - {file = "lz4-4.3.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abc197e4aca8b63f5ae200af03eb95fb4b5055a8f990079b5bdf042f568469dd"}, - {file = "lz4-4.3.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56f4fe9c6327adb97406f27a66420b22ce02d71a5c365c48d6b656b4aaeb7775"}, - {file = "lz4-4.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f0e822cd7644995d9ba248cb4b67859701748a93e2ab7fc9bc18c599a52e4604"}, - {file = "lz4-4.3.3-cp38-cp38-win32.whl", hash = "sha256:24b3206de56b7a537eda3a8123c644a2b7bf111f0af53bc14bed90ce5562d1aa"}, - {file = "lz4-4.3.3-cp38-cp38-win_amd64.whl", hash = "sha256:b47839b53956e2737229d70714f1d75f33e8ac26e52c267f0197b3189ca6de24"}, - {file = "lz4-4.3.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6756212507405f270b66b3ff7f564618de0606395c0fe10a7ae2ffcbbe0b1fba"}, - {file = "lz4-4.3.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ee9ff50557a942d187ec85462bb0960207e7ec5b19b3b48949263993771c6205"}, - {file = "lz4-4.3.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b901c7784caac9a1ded4555258207d9e9697e746cc8532129f150ffe1f6ba0d"}, - {file = "lz4-4.3.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6d9ec061b9eca86e4dcc003d93334b95d53909afd5a32c6e4f222157b50c071"}, - {file = "lz4-4.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4c7bf687303ca47d69f9f0133274958fd672efaa33fb5bcde467862d6c621f0"}, - {file = "lz4-4.3.3-cp39-cp39-win32.whl", hash = "sha256:054b4631a355606e99a42396f5db4d22046a3397ffc3269a348ec41eaebd69d2"}, - {file = "lz4-4.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:eac9af361e0d98335a02ff12fb56caeb7ea1196cf1a49dbf6f17828a131da807"}, - {file = "lz4-4.3.3.tar.gz", hash = "sha256:01fe674ef2889dbb9899d8a67361e0c4a2c833af5aeb37dd505727cf5d2a131e"}, -] -makefun = [ - {file = "makefun-1.15.1-py2.py3-none-any.whl", hash = "sha256:a63cfc7b47a539c76d97bd4fdb833c7d0461e759fd1225f580cb4be6200294d4"}, - {file = "makefun-1.15.1.tar.gz", hash = "sha256:40b0f118b6ded0d8d78c78f1eb679b8b6b2462e3c1b3e05fb1b2da8cd46b48a5"}, -] -mako = [ - {file = "Mako-1.2.4-py3-none-any.whl", hash = "sha256:c97c79c018b9165ac9922ae4f32da095ffd3c4e6872b45eded42926deea46818"}, - {file = "Mako-1.2.4.tar.gz", hash = 
"sha256:d60a3903dc3bb01a18ad6a89cdbe2e4eadc69c0bc8ef1e3773ba53d44c3f7a34"}, -] -markdown = [ - {file = "Markdown-3.4.4-py3-none-any.whl", hash = "sha256:a4c1b65c0957b4bd9e7d86ddc7b3c9868fb9670660f6f99f6d1bca8954d5a941"}, - {file = "Markdown-3.4.4.tar.gz", hash = "sha256:225c6123522495d4119a90b3a3ba31a1e87a70369e03f14799ea9c0d7183a3d6"}, -] -markdown-it-py = [ - {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, - {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, -] -markupsafe = [ - {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-win32.whl", hash = "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, - {file = 
"MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-win32.whl", hash = "sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-win_amd64.whl", hash = "sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-win32.whl", hash = "sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b"}, - {file = 
"MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-win32.whl", hash = "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba"}, - {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, -] -marshmallow = [ - {file = "marshmallow-3.20.1-py3-none-any.whl", hash = "sha256:684939db93e80ad3561392f47be0230743131560a41c5110684c16e21ade0a5c"}, - {file = "marshmallow-3.20.1.tar.gz", hash = "sha256:5d2371bbe42000f2b3fb5eaa065224df7d8f8597bc19a1bbfa5bfe7fba8da889"}, -] -marshmallow-oneofschema = [ - {file = "marshmallow-oneofschema-3.0.1.tar.gz", hash = "sha256:62cd2099b29188c92493c2940ee79d1bf2f2619a71721664e5a98ec2faa58237"}, - {file = "marshmallow_oneofschema-3.0.1-py2.py3-none-any.whl", hash = "sha256:bd29410a9f2f7457a2b428286e2a80ef76b8ddc3701527dc1f935a88914b02f2"}, -] -marshmallow-sqlalchemy = [ - {file = "marshmallow-sqlalchemy-0.26.1.tar.gz", hash = "sha256:d8525f74de51554b5c8491effe036f60629a426229befa33ff614c8569a16a73"}, - {file = "marshmallow_sqlalchemy-0.26.1-py2.py3-none-any.whl", hash = "sha256:ba7493eeb8669a3bf00d8f906b657feaa87a740ae9e4ecf829cfd6ddf763d276"}, -] -mashumaro = [ - {file = "mashumaro-3.11-py3-none-any.whl", hash = "sha256:8f858bdb33790db6d9f3087dce793a26d109aeae38bed3ca9c2d7f16f19db412"}, - {file = "mashumaro-3.11.tar.gz", hash = "sha256:b0b2443be4bdad29bb209d91fe4a2a918fbd7b63cccfeb457c7eeb567db02f5e"}, -] -mccabe = [ - {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, - {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, -] -mdit-py-plugins = [ - {file = "mdit_py_plugins-0.4.0-py3-none-any.whl", hash = "sha256:b51b3bb70691f57f974e257e367107857a93b36f322a9e6d44ca5bf28ec2def9"}, - {file = "mdit_py_plugins-0.4.0.tar.gz", hash = "sha256:d8ab27e9aed6c38aa716819fedfde15ca275715955f8a185a8e1cf90fb1d2c1b"}, -] -mdurl = [ - {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, - {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, -] -minimal-snowplow-tracker = [ +files = [ {file = "minimal-snowplow-tracker-0.0.2.tar.gz", hash = "sha256:acabf7572db0e7f5cbf6983d495eef54081f71be392330eb3aadb9ccb39daaa4"}, ] -mmh3 = [ + +[package.dependencies] +requests = 
">=2.2.1,<3.0" +six = ">=1.9.0,<2.0" + +[[package]] +name = "mmh3" +version = "4.0.1" +description = "Python extension for MurmurHash (MurmurHash3), a set of fast and robust hash functions." +optional = true +python-versions = "*" +files = [ {file = "mmh3-4.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b719ba87232749095011d567a36a25e40ed029fc61c47e74a12416d8bb60b311"}, {file = "mmh3-4.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f0ad423711c5096cf4a346011f3b3ec763208e4f4cc4b10ed41cad2a03dbfaed"}, {file = "mmh3-4.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:80918e3f8ab6b717af0a388c14ffac5a89c15d827ff008c1ef545b8b32724116"}, @@ -7325,23 +4949,82 @@ mmh3 = [ {file = "mmh3-4.0.1-cp39-cp39-win_arm64.whl", hash = "sha256:ce71856cbca9d7c74d084eeee1bc5b126ed197c1c9530a4fdb994d099b9bc4db"}, {file = "mmh3-4.0.1.tar.gz", hash = "sha256:ad8be695dc4e44a79631748ba5562d803f0ac42d36a6b97a53aca84a70809385"}, ] -more-itertools = [ + +[package.extras] +test = ["mypy (>=1.0)", "pytest (>=7.0.0)"] + +[[package]] +name = "more-itertools" +version = "10.1.0" +description = "More routines for operating on iterables, beyond itertools" +optional = false +python-versions = ">=3.8" +files = [ {file = "more-itertools-10.1.0.tar.gz", hash = "sha256:626c369fa0eb37bac0291bce8259b332fd59ac792fa5497b59837309cd5b114a"}, {file = "more_itertools-10.1.0-py3-none-any.whl", hash = "sha256:64e0735fcfdc6f3464ea133afe8ea4483b1c5fe3a3d69852e6503b43a0b222e6"}, ] -mpmath = [ + +[[package]] +name = "mpmath" +version = "1.3.0" +description = "Python library for arbitrary-precision floating-point arithmetic" +optional = true +python-versions = "*" +files = [ {file = "mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c"}, {file = "mpmath-1.3.0.tar.gz", hash = "sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f"}, ] -msal = [ + +[package.extras] +develop = ["codecov", "pycodestyle", "pytest (>=4.6)", "pytest-cov", "wheel"] +docs = ["sphinx"] +gmpy = ["gmpy2 (>=2.1.0a4)"] +tests = ["pytest (>=4.6)"] + +[[package]] +name = "msal" +version = "1.23.0" +description = "The Microsoft Authentication Library (MSAL) for Python library enables your app to access the Microsoft Cloud by supporting authentication of users with Microsoft Azure Active Directory accounts (AAD) and Microsoft Accounts (MSA) using industry standard OAuth2 and OpenID Connect." +optional = true +python-versions = "*" +files = [ {file = "msal-1.23.0-py2.py3-none-any.whl", hash = "sha256:3342e0837a047007f9d479e814b559c3219767453d57920dc40a31986862048b"}, {file = "msal-1.23.0.tar.gz", hash = "sha256:25c9a33acf84301f93d1fdbe9f1a9c60cd38af0d5fffdbfa378138fc7bc1e86b"}, ] -msal-extensions = [ + +[package.dependencies] +cryptography = ">=0.6,<44" +PyJWT = {version = ">=1.0.0,<3", extras = ["crypto"]} +requests = ">=2.0.0,<3" + +[package.extras] +broker = ["pymsalruntime (>=0.13.2,<0.14)"] + +[[package]] +name = "msal-extensions" +version = "1.0.0" +description = "Microsoft Authentication Library extensions (MSAL EX) provides a persistence API that can save your data on disk, encrypted on Windows, macOS and Linux. Concurrent data access will be coordinated by a file lock mechanism." 
+optional = true +python-versions = "*" +files = [ {file = "msal-extensions-1.0.0.tar.gz", hash = "sha256:c676aba56b0cce3783de1b5c5ecfe828db998167875126ca4b47dc6436451354"}, {file = "msal_extensions-1.0.0-py2.py3-none-any.whl", hash = "sha256:91e3db9620b822d0ed2b4d1850056a0f133cba04455e62f11612e40f5502f2ee"}, ] -msgpack = [ + +[package.dependencies] +msal = ">=0.4.1,<2.0.0" +portalocker = [ + {version = ">=1.0,<3", markers = "python_version >= \"3.5\" and platform_system != \"Windows\""}, + {version = ">=1.6,<3", markers = "python_version >= \"3.5\" and platform_system == \"Windows\""}, +] + +[[package]] +name = "msgpack" +version = "1.0.5" +description = "MessagePack serializer" +optional = false +python-versions = "*" +files = [ {file = "msgpack-1.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:525228efd79bb831cf6830a732e2e80bc1b05436b086d4264814b4b2955b2fa9"}, {file = "msgpack-1.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4f8d8b3bf1ff2672567d6b5c725a1b347fe838b912772aa8ae2bf70338d5a198"}, {file = "msgpack-1.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cdc793c50be3f01106245a61b739328f7dccc2c648b501e237f0699fe1395b81"}, @@ -7406,7 +5089,14 @@ msgpack = [ {file = "msgpack-1.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:06f5174b5f8ed0ed919da0e62cbd4ffde676a374aba4020034da05fab67b9164"}, {file = "msgpack-1.0.5.tar.gz", hash = "sha256:c075544284eadc5cddc70f4757331d99dcbc16b2bbd4849d15f8aae4cf36d31c"}, ] -multidict = [ + +[[package]] +name = "multidict" +version = "6.0.4" +description = "multidict implementation" +optional = false +python-versions = ">=3.7" +files = [ {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8"}, {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeb6dcc05e911516ae3d1f207d4b0520d07f54484c49dfc294d6e7d63b734171"}, {file = "multidict-6.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d6d635d5209b82a3492508cf5b365f3446afb65ae7ebd755e70e18f287b0adf7"}, @@ -7482,7 +5172,14 @@ multidict = [ {file = "multidict-6.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:33029f5734336aa0d4c0384525da0387ef89148dc7191aae00ca5fb23d7aafc2"}, {file = "multidict-6.0.4.tar.gz", hash = "sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49"}, ] -mypy = [ + +[[package]] +name = "mypy" +version = "1.6.1" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.8" +files = [ {file = "mypy-1.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e5012e5cc2ac628177eaac0e83d622b2dd499e28253d4107a08ecc59ede3fc2c"}, {file = "mypy-1.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d8fbb68711905f8912e5af474ca8b78d077447d8f3918997fecbf26943ff3cbb"}, {file = "mypy-1.6.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21a1ad938fee7d2d96ca666c77b7c494c3c5bd88dff792220e1afbebb2925b5e"}, @@ -7511,47 +5208,161 @@ mypy = [ {file = "mypy-1.6.1-py3-none-any.whl", hash = "sha256:4cbe68ef919c28ea561165206a2dcb68591c50f3bcf777932323bc208d949cf1"}, {file = "mypy-1.6.1.tar.gz", hash = "sha256:4d01c00d09a0be62a4ca3f933e315455bde83f37f892ba4b08ce92f3cf44bcc1"}, ] -mypy-boto3-athena = [ + +[package.dependencies] +mypy-extensions = ">=1.0.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = ">=4.1.0" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +install-types = ["pip"] +reports = ["lxml"] + +[[package]] +name = "mypy-boto3-athena" +version 
= "1.28.36" +description = "Type annotations for boto3.Athena 1.28.36 service generated with mypy-boto3-builder 7.18.0" +optional = true +python-versions = ">=3.7" +files = [ {file = "mypy-boto3-athena-1.28.36.tar.gz", hash = "sha256:a76df6aace3dc1d91b3f74640d617cd1b4802e5f348a22db2f16dfce0b01ee26"}, {file = "mypy_boto3_athena-1.28.36-py3-none-any.whl", hash = "sha256:b79b77df6ba30c55ff2f1f8b36de410f537c8c978d892e958b4c5e165797915a"}, ] -mypy-boto3-glue = [ + +[package.dependencies] +typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} + +[[package]] +name = "mypy-boto3-glue" +version = "1.28.36" +description = "Type annotations for boto3.Glue 1.28.36 service generated with mypy-boto3-builder 7.18.0" +optional = true +python-versions = ">=3.7" +files = [ {file = "mypy-boto3-glue-1.28.36.tar.gz", hash = "sha256:161771252bb6a220a0bfd8e6ad71da8548599c611f95fe8a94846f4a3386d2ae"}, {file = "mypy_boto3_glue-1.28.36-py3-none-any.whl", hash = "sha256:73bc14616ac65a5c02adea5efba7bbbcf8207cd0c0e3237c13d351ebc916338d"}, ] -mypy-boto3-lakeformation = [ + +[package.dependencies] +typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} + +[[package]] +name = "mypy-boto3-lakeformation" +version = "1.28.36" +description = "Type annotations for boto3.LakeFormation 1.28.36 service generated with mypy-boto3-builder 7.18.0" +optional = true +python-versions = ">=3.7" +files = [ {file = "mypy-boto3-lakeformation-1.28.36.tar.gz", hash = "sha256:9327cf0d28a09abf5bd90ae946ce7420b32a3b979a1a3554ac93716c3dceacb0"}, {file = "mypy_boto3_lakeformation-1.28.36-py3-none-any.whl", hash = "sha256:9525a8ab3d69632d4ec83eb565ff7fdfa1181fbdf032bcff4a20d4f8a0350688"}, ] -mypy-boto3-sts = [ + +[package.dependencies] +typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} + +[[package]] +name = "mypy-boto3-sts" +version = "1.28.37" +description = "Type annotations for boto3.STS 1.28.37 service generated with mypy-boto3-builder 7.18.2" +optional = true +python-versions = ">=3.7" +files = [ {file = "mypy-boto3-sts-1.28.37.tar.gz", hash = "sha256:54d64ca695ab90a51c68ac1e67ff9eae7ec69f926649e320a3b90ed1ec841a95"}, {file = "mypy_boto3_sts-1.28.37-py3-none-any.whl", hash = "sha256:24106ff30ecfe7ad0538657bbd00b6009418a5382b323cac46e0e26c1f5d50fb"}, ] -mypy-extensions = [ + +[package.dependencies] +typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." +optional = false +python-versions = ">=3.5" +files = [ {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] -natsort = [ + +[[package]] +name = "natsort" +version = "8.4.0" +description = "Simple yet flexible natural sorting in Python." 
+optional = false +python-versions = ">=3.7" +files = [ {file = "natsort-8.4.0-py3-none-any.whl", hash = "sha256:4732914fb471f56b5cce04d7bae6f164a592c7712e1c85f9ef585e197299521c"}, {file = "natsort-8.4.0.tar.gz", hash = "sha256:45312c4a0e5507593da193dedd04abb1469253b601ecaf63445ad80f0a1ea581"}, ] -networkx = [ + +[package.extras] +fast = ["fastnumbers (>=2.0.0)"] +icu = ["PyICU (>=1.0.0)"] + +[[package]] +name = "networkx" +version = "2.8.8" +description = "Python package for creating and manipulating graphs and networks" +optional = false +python-versions = ">=3.8" +files = [ {file = "networkx-2.8.8-py3-none-any.whl", hash = "sha256:e435dfa75b1d7195c7b8378c3859f0445cd88c6b0375c181ed66823a9ceb7524"}, {file = "networkx-2.8.8.tar.gz", hash = "sha256:230d388117af870fce5647a3c52401fcf753e94720e6ea6b4197a5355648885e"}, ] -nr-date = [ + +[package.extras] +default = ["matplotlib (>=3.4)", "numpy (>=1.19)", "pandas (>=1.3)", "scipy (>=1.8)"] +developer = ["mypy (>=0.982)", "pre-commit (>=2.20)"] +doc = ["nb2plots (>=0.6)", "numpydoc (>=1.5)", "pillow (>=9.2)", "pydata-sphinx-theme (>=0.11)", "sphinx (>=5.2)", "sphinx-gallery (>=0.11)", "texext (>=0.6.6)"] +extra = ["lxml (>=4.6)", "pydot (>=1.4.2)", "pygraphviz (>=1.9)", "sympy (>=1.10)"] +test = ["codecov (>=2.1)", "pytest (>=7.2)", "pytest-cov (>=4.0)"] + +[[package]] +name = "nr-date" +version = "2.1.0" +description = "" +optional = false +python-versions = ">=3.6,<4.0" +files = [ {file = "nr_date-2.1.0-py3-none-any.whl", hash = "sha256:bd672a9dfbdcf7c4b9289fea6750c42490eaee08036a72059dcc78cb236ed568"}, {file = "nr_date-2.1.0.tar.gz", hash = "sha256:0643aea13bcdc2a8bc56af9d5e6a89ef244c9744a1ef00cdc735902ba7f7d2e6"}, ] -nr-stream = [ + +[[package]] +name = "nr-stream" +version = "1.1.5" +description = "" +optional = false +python-versions = ">=3.6,<4.0" +files = [ {file = "nr_stream-1.1.5-py3-none-any.whl", hash = "sha256:47e12150b331ad2cb729cfd9d2abd281c9949809729ba461c6aa87dd9927b2d4"}, {file = "nr_stream-1.1.5.tar.gz", hash = "sha256:eb0216c6bfc61a46d4568dba3b588502c610ec8ddef4ac98f3932a2bd7264f65"}, ] -nr-util = [ + +[[package]] +name = "nr-util" +version = "0.8.12" +description = "General purpose Python utility library." 
+optional = false +python-versions = ">=3.7,<4.0" +files = [ {file = "nr.util-0.8.12-py3-none-any.whl", hash = "sha256:91da02ac9795eb8e015372275c1efe54bac9051231ee9b0e7e6f96b0b4e7d2bb"}, {file = "nr.util-0.8.12.tar.gz", hash = "sha256:a4549c2033d99d2f0379b3f3d233fd2a8ade286bbf0b3ad0cc7cea16022214f4"}, ] -numpy = [ + +[package.dependencies] +deprecated = ">=1.2.0,<2.0.0" +typing-extensions = ">=3.0.0" + +[[package]] +name = "numpy" +version = "1.24.4" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.8" +files = [ {file = "numpy-1.24.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c0bfb52d2169d58c1cdb8cc1f16989101639b34c7d3ce60ed70b19c63eba0b64"}, {file = "numpy-1.24.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ed094d4f0c177b1b8e7aa9cba7d6ceed51c0e569a5318ac0ca9a090680a6a1b1"}, {file = "numpy-1.24.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79fc682a374c4a8ed08b331bef9c5f582585d1048fa6d80bc6c35bc384eee9b4"}, @@ -7580,6 +5391,15 @@ numpy = [ {file = "numpy-1.24.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95f7ac6540e95bc440ad77f56e520da5bf877f87dca58bd095288dce8940532a"}, {file = "numpy-1.24.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e98f220aa76ca2a977fe435f5b04d7b3470c0a2e6312907b37ba6068f26787f2"}, {file = "numpy-1.24.4.tar.gz", hash = "sha256:80f5e3a4e498641401868df4208b74581206afbee7cf7b8329daae82676d9463"}, +] + +[[package]] +name = "numpy" +version = "1.26.1" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = "<3.13,>=3.9" +files = [ {file = "numpy-1.26.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:82e871307a6331b5f09efda3c22e03c095d957f04bf6bc1804f30048d0e5e7af"}, {file = "numpy-1.26.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cdd9ec98f0063d93baeb01aad472a1a0840dee302842a2746a7a8e92968f9575"}, {file = "numpy-1.26.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d78f269e0c4fd365fc2992c00353e4530d274ba68f15e968d8bc3c69ce5f5244"}, @@ -7613,11 +5433,30 @@ numpy = [ {file = "numpy-1.26.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:6965888d65d2848e8768824ca8288db0a81263c1efccec881cb35a0d805fcd2f"}, {file = "numpy-1.26.1.tar.gz", hash = "sha256:c8c6c72d4a9f831f328efb1312642a1cafafaa88981d9ab76368d50d07d93cbe"}, ] -oauthlib = [ + +[[package]] +name = "oauthlib" +version = "3.2.2" +description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" +optional = false +python-versions = ">=3.6" +files = [ {file = "oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"}, {file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"}, ] -onnx = [ + +[package.extras] +rsa = ["cryptography (>=3.0.0)"] +signals = ["blinker (>=1.4.0)"] +signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] + +[[package]] +name = "onnx" +version = "1.15.0" +description = "Open Neural Network Exchange" +optional = true +python-versions = ">=3.8" +files = [ {file = "onnx-1.15.0-cp310-cp310-macosx_10_12_universal2.whl", hash = "sha256:51cacb6aafba308aaf462252ced562111f6991cdc7bc57a6c554c3519453a8ff"}, {file = "onnx-1.15.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:0aee26b6f7f7da7e840de75ad9195a77a147d0662c94eaa6483be13ba468ffc1"}, {file = 
"onnx-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:baf6ef6c93b3b843edb97a8d5b3d229a1301984f3f8dee859c29634d2083e6f9"}, @@ -7644,7 +5483,21 @@ onnx = [ {file = "onnx-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:95d7a3e2d79d371e272e39ae3f7547e0b116d0c7f774a4004e97febe6c93507f"}, {file = "onnx-1.15.0.tar.gz", hash = "sha256:b18461a7d38f286618ca2a6e78062a2a9c634ce498e631e708a8041b00094825"}, ] -onnxruntime = [ + +[package.dependencies] +numpy = "*" +protobuf = ">=3.20.2" + +[package.extras] +reference = ["Pillow", "google-re2"] + +[[package]] +name = "onnxruntime" +version = "1.16.1" +description = "ONNX Runtime is a runtime accelerator for Machine Learning models" +optional = true +python-versions = "*" +files = [ {file = "onnxruntime-1.16.1-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:28b2c7f444b4119950b69370801cd66067f403d19cbaf2a444735d7c269cce4a"}, {file = "onnxruntime-1.16.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c24e04f33e7899f6aebb03ed51e51d346c1f906b05c5569d58ac9a12d38a2f58"}, {file = "onnxruntime-1.16.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fa93b166f2d97063dc9f33c5118c5729a4a5dd5617296b6dbef42f9047b3e81"}, @@ -7670,43 +5523,166 @@ onnxruntime = [ {file = "onnxruntime-1.16.1-cp39-cp39-win32.whl", hash = "sha256:85771adb75190db9364b25ddec353ebf07635b83eb94b64ed014f1f6d57a3857"}, {file = "onnxruntime-1.16.1-cp39-cp39-win_amd64.whl", hash = "sha256:d32d2b30799c1f950123c60ae8390818381fd5f88bdf3627eeca10071c155dc5"}, ] -openpyxl = [ + +[package.dependencies] +coloredlogs = "*" +flatbuffers = "*" +numpy = ">=1.21.6" +packaging = "*" +protobuf = "*" +sympy = "*" + +[[package]] +name = "openpyxl" +version = "3.1.2" +description = "A Python library to read/write Excel 2010 xlsx/xlsm files" +optional = true +python-versions = ">=3.6" +files = [ {file = "openpyxl-3.1.2-py2.py3-none-any.whl", hash = "sha256:f91456ead12ab3c6c2e9491cf33ba6d08357d802192379bb482f1033ade496f5"}, {file = "openpyxl-3.1.2.tar.gz", hash = "sha256:a6f5977418eff3b2d5500d54d9db50c8277a368436f4e4f8ddb1be3422870184"}, ] -opentelemetry-api = [ + +[package.dependencies] +et-xmlfile = "*" + +[[package]] +name = "opentelemetry-api" +version = "1.15.0" +description = "OpenTelemetry Python API" +optional = false +python-versions = ">=3.7" +files = [ {file = "opentelemetry_api-1.15.0-py3-none-any.whl", hash = "sha256:e6c2d2e42140fd396e96edf75a7ceb11073f4efb4db87565a431cc9d0f93f2e0"}, {file = "opentelemetry_api-1.15.0.tar.gz", hash = "sha256:79ab791b4aaad27acc3dc3ba01596db5b5aac2ef75c70622c6038051d6c2cded"}, ] -opentelemetry-exporter-otlp = [ + +[package.dependencies] +deprecated = ">=1.2.6" +setuptools = ">=16.0" + +[[package]] +name = "opentelemetry-exporter-otlp" +version = "1.15.0" +description = "OpenTelemetry Collector Exporters" +optional = false +python-versions = ">=3.7" +files = [ {file = "opentelemetry_exporter_otlp-1.15.0-py3-none-any.whl", hash = "sha256:79f22748b6a54808a0448093dfa189c8490e729f67c134d4c992533d9393b33e"}, {file = "opentelemetry_exporter_otlp-1.15.0.tar.gz", hash = "sha256:4f7c49751d9720e2e726e13b0bb958ccade4e29122c305d92c033da432c8d2c5"}, ] -opentelemetry-exporter-otlp-proto-grpc = [ + +[package.dependencies] +opentelemetry-exporter-otlp-proto-grpc = "1.15.0" +opentelemetry-exporter-otlp-proto-http = "1.15.0" + +[[package]] +name = "opentelemetry-exporter-otlp-proto-grpc" +version = "1.15.0" +description = "OpenTelemetry Collector Protobuf over gRPC Exporter" +optional = false +python-versions = ">=3.7" 
+files = [ {file = "opentelemetry_exporter_otlp_proto_grpc-1.15.0-py3-none-any.whl", hash = "sha256:c2a5492ba7d140109968135d641d06ce3c5bd73c50665f787526065d57d7fd1d"}, {file = "opentelemetry_exporter_otlp_proto_grpc-1.15.0.tar.gz", hash = "sha256:844f2a4bb9bcda34e4eb6fe36765e5031aacb36dc60ed88c90fc246942ea26e7"}, ] -opentelemetry-exporter-otlp-proto-http = [ + +[package.dependencies] +backoff = {version = ">=1.10.0,<3.0.0", markers = "python_version >= \"3.7\""} +googleapis-common-protos = ">=1.52,<2.0" +grpcio = ">=1.0.0,<2.0.0" +opentelemetry-api = ">=1.12,<2.0" +opentelemetry-proto = "1.15.0" +opentelemetry-sdk = ">=1.12,<2.0" + +[package.extras] +test = ["pytest-grpc"] + +[[package]] +name = "opentelemetry-exporter-otlp-proto-http" +version = "1.15.0" +description = "OpenTelemetry Collector Protobuf over HTTP Exporter" +optional = false +python-versions = ">=3.7" +files = [ {file = "opentelemetry_exporter_otlp_proto_http-1.15.0-py3-none-any.whl", hash = "sha256:3ec2a02196c8a54bf5cbf7fe623a5238625638e83b6047a983bdf96e2bbb74c0"}, {file = "opentelemetry_exporter_otlp_proto_http-1.15.0.tar.gz", hash = "sha256:11b2c814249a49b22f6cca7a06b05701f561d577b747f3660dfd67b6eb9daf9c"}, ] -opentelemetry-proto = [ + +[package.dependencies] +backoff = {version = ">=1.10.0,<3.0.0", markers = "python_version >= \"3.7\""} +googleapis-common-protos = ">=1.52,<2.0" +opentelemetry-api = ">=1.12,<2.0" +opentelemetry-proto = "1.15.0" +opentelemetry-sdk = ">=1.12,<2.0" +requests = ">=2.7,<3.0" + +[package.extras] +test = ["responses (==0.22.0)"] + +[[package]] +name = "opentelemetry-proto" +version = "1.15.0" +description = "OpenTelemetry Python Proto" +optional = false +python-versions = ">=3.7" +files = [ {file = "opentelemetry_proto-1.15.0-py3-none-any.whl", hash = "sha256:044b6d044b4d10530f250856f933442b8753a17f94ae37c207607f733fb9a844"}, {file = "opentelemetry_proto-1.15.0.tar.gz", hash = "sha256:9c4008e40ac8cab359daac283fbe7002c5c29c77ea2674ad5626a249e64e0101"}, ] -opentelemetry-sdk = [ + +[package.dependencies] +protobuf = ">=3.19,<5.0" + +[[package]] +name = "opentelemetry-sdk" +version = "1.15.0" +description = "OpenTelemetry Python SDK" +optional = false +python-versions = ">=3.7" +files = [ {file = "opentelemetry_sdk-1.15.0-py3-none-any.whl", hash = "sha256:555c533e9837766119bbccc7a80458c9971d853a6f1da683a2246cd5e53b4645"}, {file = "opentelemetry_sdk-1.15.0.tar.gz", hash = "sha256:98dbffcfeebcbff12c0c974292d6ea603180a145904cf838b1fe4d5c99078425"}, ] -opentelemetry-semantic-conventions = [ + +[package.dependencies] +opentelemetry-api = "1.15.0" +opentelemetry-semantic-conventions = "0.36b0" +setuptools = ">=16.0" +typing-extensions = ">=3.7.4" + +[[package]] +name = "opentelemetry-semantic-conventions" +version = "0.36b0" +description = "OpenTelemetry Semantic Conventions" +optional = false +python-versions = ">=3.7" +files = [ {file = "opentelemetry_semantic_conventions-0.36b0-py3-none-any.whl", hash = "sha256:adc05635e87b9d3e007c9f530eed487fc3ef2177d02f82f674f28ebf9aff8243"}, {file = "opentelemetry_semantic_conventions-0.36b0.tar.gz", hash = "sha256:829dc221795467d98b773c04096e29be038d77526dc8d6ac76f546fb6279bf01"}, ] -ordered-set = [ + +[[package]] +name = "ordered-set" +version = "4.1.0" +description = "An OrderedSet is a custom MutableSet that remembers its order, so that every" +optional = false +python-versions = ">=3.7" +files = [ {file = "ordered-set-4.1.0.tar.gz", hash = "sha256:694a8e44c87657c59292ede72891eb91d34131f6531463aab3009191c77364a8"}, {file = "ordered_set-4.1.0-py3-none-any.whl", 
hash = "sha256:046e1132c71fcf3330438a539928932caf51ddbc582496833e23de611de14562"}, ] -orjson = [ + +[package.extras] +dev = ["black", "mypy", "pytest"] + +[[package]] +name = "orjson" +version = "3.9.5" +description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" +optional = false +python-versions = ">=3.7" +files = [ {file = "orjson-3.9.5-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:ad6845912a71adcc65df7c8a7f2155eba2096cf03ad2c061c93857de70d699ad"}, {file = "orjson-3.9.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e298e0aacfcc14ef4476c3f409e85475031de24e5b23605a465e9bf4b2156273"}, {file = "orjson-3.9.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:83c9939073281ef7dd7c5ca7f54cceccb840b440cec4b8a326bda507ff88a0a6"}, @@ -7768,11 +5744,25 @@ orjson = [ {file = "orjson-3.9.5-cp39-none-win_amd64.whl", hash = "sha256:91dda66755795ac6100e303e206b636568d42ac83c156547634256a2e68de694"}, {file = "orjson-3.9.5.tar.gz", hash = "sha256:6daf5ee0b3cf530b9978cdbf71024f1c16ed4a67d05f6ec435c6e7fe7a52724c"}, ] -packaging = [ + +[[package]] +name = "packaging" +version = "23.1" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"}, {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, ] -pandas = [ + +[[package]] +name = "pandas" +version = "2.0.3" +description = "Powerful data structures for data analysis, time series, and statistics" +optional = false +python-versions = ">=3.8" +files = [ {file = "pandas-2.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e4c7c9f27a4185304c7caf96dc7d91bc60bc162221152de697c98eb0b2648dd8"}, {file = "pandas-2.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f167beed68918d62bffb6ec64f2e1d8a7d297a038f86d4aed056b9493fca407f"}, {file = "pandas-2.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce0c6f76a0f1ba361551f3e6dceaff06bde7514a374aa43e33b588ec10420183"}, @@ -7798,6 +5788,48 @@ pandas = [ {file = "pandas-2.0.3-cp39-cp39-win32.whl", hash = "sha256:04dbdbaf2e4d46ca8da896e1805bc04eb85caa9a82e259e8eed00254d5e0c682"}, {file = "pandas-2.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:1168574b036cd8b93abc746171c9b4f1b83467438a5e45909fed645cf8692dbc"}, {file = "pandas-2.0.3.tar.gz", hash = "sha256:c02f372a88e0d17f36d3093a644c73cfc1788e876a7c4bcb4020a77512e2043c"}, +] + +[package.dependencies] +numpy = [ + {version = ">=1.20.3", markers = "python_version < \"3.10\""}, + {version = ">=1.21.0", markers = "python_version >= \"3.10\" and python_version < \"3.11\""}, + {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, +] +python-dateutil = ">=2.8.2" +pytz = ">=2020.1" +tzdata = ">=2022.1" + +[package.extras] +all = ["PyQt5 (>=5.15.1)", "SQLAlchemy (>=1.4.16)", "beautifulsoup4 (>=4.9.3)", "bottleneck (>=1.3.2)", "brotlipy (>=0.7.0)", "fastparquet (>=0.6.3)", "fsspec (>=2021.07.0)", "gcsfs (>=2021.07.0)", "html5lib (>=1.1)", "hypothesis (>=6.34.2)", "jinja2 (>=3.0.0)", "lxml (>=4.6.3)", "matplotlib (>=3.6.1)", "numba (>=0.53.1)", "numexpr (>=2.7.3)", "odfpy (>=1.4.1)", "openpyxl (>=3.0.7)", "pandas-gbq (>=0.15.0)", "psycopg2 (>=2.8.6)", "pyarrow (>=7.0.0)", "pymysql (>=1.0.2)", "pyreadstat (>=1.1.2)", "pytest (>=7.3.2)", "pytest-asyncio (>=0.17.0)", 
"pytest-xdist (>=2.2.0)", "python-snappy (>=0.6.0)", "pyxlsb (>=1.0.8)", "qtpy (>=2.2.0)", "s3fs (>=2021.08.0)", "scipy (>=1.7.1)", "tables (>=3.6.1)", "tabulate (>=0.8.9)", "xarray (>=0.21.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=1.4.3)", "zstandard (>=0.15.2)"] +aws = ["s3fs (>=2021.08.0)"] +clipboard = ["PyQt5 (>=5.15.1)", "qtpy (>=2.2.0)"] +compression = ["brotlipy (>=0.7.0)", "python-snappy (>=0.6.0)", "zstandard (>=0.15.2)"] +computation = ["scipy (>=1.7.1)", "xarray (>=0.21.0)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.0.7)", "pyxlsb (>=1.0.8)", "xlrd (>=2.0.1)", "xlsxwriter (>=1.4.3)"] +feather = ["pyarrow (>=7.0.0)"] +fss = ["fsspec (>=2021.07.0)"] +gcp = ["gcsfs (>=2021.07.0)", "pandas-gbq (>=0.15.0)"] +hdf5 = ["tables (>=3.6.1)"] +html = ["beautifulsoup4 (>=4.9.3)", "html5lib (>=1.1)", "lxml (>=4.6.3)"] +mysql = ["SQLAlchemy (>=1.4.16)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.0.0)", "tabulate (>=0.8.9)"] +parquet = ["pyarrow (>=7.0.0)"] +performance = ["bottleneck (>=1.3.2)", "numba (>=0.53.1)", "numexpr (>=2.7.1)"] +plot = ["matplotlib (>=3.6.1)"] +postgresql = ["SQLAlchemy (>=1.4.16)", "psycopg2 (>=2.8.6)"] +spss = ["pyreadstat (>=1.1.2)"] +sql-other = ["SQLAlchemy (>=1.4.16)"] +test = ["hypothesis (>=6.34.2)", "pytest (>=7.3.2)", "pytest-asyncio (>=0.17.0)", "pytest-xdist (>=2.2.0)"] +xml = ["lxml (>=4.6.3)"] + +[[package]] +name = "pandas" +version = "2.2.0" +description = "Powerful data structures for data analysis, time series, and statistics" +optional = false +python-versions = ">=3.9" +files = [ {file = "pandas-2.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8108ee1712bb4fa2c16981fba7e68b3f6ea330277f5ca34fa8d557e986a11670"}, {file = "pandas-2.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:736da9ad4033aeab51d067fc3bd69a0ba36f5a60f66a527b3d72e2030e63280a"}, {file = "pandas-2.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38e0b4fc3ddceb56ec8a287313bc22abe17ab0eb184069f08fc6a9352a769b18"}, @@ -7828,23 +5860,95 @@ pandas = [ {file = "pandas-2.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:3de918a754bbf2da2381e8a3dcc45eede8cd7775b047b923f9006d5f876802ae"}, {file = "pandas-2.2.0.tar.gz", hash = "sha256:30b83f7c3eb217fb4d1b494a57a2fda5444f17834f5df2de6b2ffff68dc3c8e2"}, ] -parsedatetime = [ + +[package.dependencies] +numpy = {version = ">=1.26.0,<2", markers = "python_version >= \"3.12\""} +python-dateutil = ">=2.8.2" +pytz = ">=2020.1" +tzdata = ">=2022.7" + +[package.extras] +all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"] +aws = ["s3fs (>=2022.11.0)"] +clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"] +compression = ["zstandard (>=0.19.0)"] +computation = ["scipy (>=1.10.0)", "xarray 
(>=2022.12.0)"] +consortium-standard = ["dataframe-api-compat (>=0.1.7)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] +feather = ["pyarrow (>=10.0.1)"] +fss = ["fsspec (>=2022.11.0)"] +gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"] +hdf5 = ["tables (>=3.8.0)"] +html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"] +mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] +parquet = ["pyarrow (>=10.0.1)"] +performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] +plot = ["matplotlib (>=3.6.3)"] +postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] +spss = ["pyreadstat (>=1.2.0)"] +sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] +test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] +xml = ["lxml (>=4.9.2)"] + +[[package]] +name = "parsedatetime" +version = "2.4" +description = "Parse human-readable date/time text." +optional = false +python-versions = "*" +files = [ {file = "parsedatetime-2.4-py2-none-any.whl", hash = "sha256:9ee3529454bf35c40a77115f5a596771e59e1aee8c53306f346c461b8e913094"}, {file = "parsedatetime-2.4.tar.gz", hash = "sha256:3d817c58fb9570d1eec1dd46fa9448cd644eeed4fb612684b02dfda3a79cb84b"}, ] -pathspec = [ + +[package.dependencies] +future = "*" + +[[package]] +name = "pathspec" +version = "0.11.2" +description = "Utility library for gitignore style pattern matching of file paths." +optional = false +python-versions = ">=3.7" +files = [ {file = "pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20"}, {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"}, ] -pathvalidate = [ + +[[package]] +name = "pathvalidate" +version = "3.1.0" +description = "pathvalidate is a Python library to sanitize/validate a string such as filenames/file-paths/etc." 
+optional = false +python-versions = ">=3.7" +files = [ {file = "pathvalidate-3.1.0-py3-none-any.whl", hash = "sha256:912fd1d2e1a2a6a6f98da36a91f21ed86746473810ff625b9c34f3d06c0caa1d"}, {file = "pathvalidate-3.1.0.tar.gz", hash = "sha256:426970226e24199fd90d93995d223c1e28bda967cdf4370755a14cdf72a2a8ee"}, ] -pbr = [ + +[package.extras] +docs = ["Sphinx (>=2.4)", "sphinx-rtd-theme (>=1.2.2)", "urllib3 (<2)"] +test = ["Faker (>=1.0.8)", "allpairspy (>=2)", "click (>=6.2)", "pytest (>=6.0.1)", "pytest-discord (>=0.1.2)", "pytest-md-report (>=0.3)"] + +[[package]] +name = "pbr" +version = "5.11.1" +description = "Python Build Reasonableness" +optional = false +python-versions = ">=2.6" +files = [ {file = "pbr-5.11.1-py2.py3-none-any.whl", hash = "sha256:567f09558bae2b3ab53cb3c1e2e33e726ff3338e7bae3db5dc954b3a44eef12b"}, {file = "pbr-5.11.1.tar.gz", hash = "sha256:aefc51675b0b533d56bb5fd1c8c6c0522fe31896679882e1c4c63d5e4a0fccb3"}, ] -pendulum = [ + +[[package]] +name = "pendulum" +version = "3.0.0" +description = "Python datetimes made easy" +optional = false +python-versions = ">=3.8" +files = [ {file = "pendulum-3.0.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2cf9e53ef11668e07f73190c805dbdf07a1939c3298b78d5a9203a86775d1bfd"}, {file = "pendulum-3.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fb551b9b5e6059377889d2d878d940fd0bbb80ae4810543db18e6f77b02c5ef6"}, {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c58227ac260d5b01fc1025176d7b31858c9f62595737f350d22124a9a3ad82d"}, @@ -7929,43 +6033,154 @@ pendulum = [ {file = "pendulum-3.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:92c307ae7accebd06cbae4729f0ba9fa724df5f7d91a0964b1b972a22baa482b"}, {file = "pendulum-3.0.0.tar.gz", hash = "sha256:5d034998dea404ec31fae27af6b22cff1708f830a1ed7353be4d1019bb9f584e"}, ] -pipdeptree = [ + +[package.dependencies] +"backports.zoneinfo" = {version = ">=0.2.1", markers = "python_version < \"3.9\""} +importlib-resources = {version = ">=5.9.0", markers = "python_version < \"3.9\""} +python-dateutil = ">=2.6" +tzdata = ">=2020.1" + +[package.extras] +test = ["time-machine (>=2.6.0)"] + +[[package]] +name = "pipdeptree" +version = "2.9.6" +description = "Command line utility to show dependency tree of packages." +optional = true +python-versions = ">=3.7" +files = [ {file = "pipdeptree-2.9.6-py3-none-any.whl", hash = "sha256:de93f990d21224297c9f03e057da5a3dc65ff732a0147945dd9421671f13626b"}, {file = "pipdeptree-2.9.6.tar.gz", hash = "sha256:f815caf165e89c576ce659b866c7a82ae4590420c2d020a92d32e45097f8bc73"}, ] -pkgutil-resolve-name = [ + +[package.extras] +graphviz = ["graphviz (>=0.20.1)"] +test = ["covdefaults (>=2.3)", "diff-cover (>=7.6)", "pip (>=23.1.2)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)", "virtualenv (>=20.23.1,<21)"] + +[[package]] +name = "pkgutil-resolve-name" +version = "1.3.10" +description = "Resolve a name to an object." +optional = false +python-versions = ">=3.6" +files = [ {file = "pkgutil_resolve_name-1.3.10-py3-none-any.whl", hash = "sha256:ca27cc078d25c5ad71a9de0a7a330146c4e014c2462d9af19c6b828280649c5e"}, {file = "pkgutil_resolve_name-1.3.10.tar.gz", hash = "sha256:357d6c9e6a755653cfd78893817c0853af365dd51ec97f3d358a819373bbd174"}, ] -platformdirs = [ + +[[package]] +name = "platformdirs" +version = "3.8.1" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
+optional = false +python-versions = ">=3.7" +files = [ {file = "platformdirs-3.8.1-py3-none-any.whl", hash = "sha256:cec7b889196b9144d088e4c57d9ceef7374f6c39694ad1577a0aab50d27ea28c"}, {file = "platformdirs-3.8.1.tar.gz", hash = "sha256:f87ca4fcff7d2b0f81c6a748a77973d7af0f4d526f98f308477c3c436c74d528"}, ] -pluggy = [ + +[package.extras] +docs = ["furo (>=2023.5.20)", "proselint (>=0.13)", "sphinx (>=7.0.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest-cov (>=4.1)", "pytest-mock (>=3.10)"] + +[[package]] +name = "pluggy" +version = "1.3.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, ] -ply = [ + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "ply" +version = "3.11" +description = "Python Lex & Yacc" +optional = false +python-versions = "*" +files = [ {file = "ply-3.11-py2.py3-none-any.whl", hash = "sha256:096f9b8350b65ebd2fd1346b12452efe5b9607f7482813ffca50c22722a807ce"}, {file = "ply-3.11.tar.gz", hash = "sha256:00c7c1aaa88358b9c765b6d3000c6eec0ba42abca5351b095321aef446081da3"}, ] -portalocker = [ + +[[package]] +name = "portalocker" +version = "2.7.0" +description = "Wraps the portalocker recipe for easy usage" +optional = true +python-versions = ">=3.5" +files = [ {file = "portalocker-2.7.0-py2.py3-none-any.whl", hash = "sha256:a07c5b4f3985c3cf4798369631fb7011adb498e2a46d8440efc75a8f29a0f983"}, {file = "portalocker-2.7.0.tar.gz", hash = "sha256:032e81d534a88ec1736d03f780ba073f047a06c478b06e2937486f334e955c51"}, ] -prefixed = [ + +[package.dependencies] +pywin32 = {version = ">=226", markers = "platform_system == \"Windows\""} + +[package.extras] +docs = ["sphinx (>=1.7.1)"] +redis = ["redis"] +tests = ["pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "pytest-mypy (>=0.8.0)", "pytest-timeout (>=2.1.0)", "redis", "sphinx (>=6.0.0)"] + +[[package]] +name = "prefixed" +version = "0.7.0" +description = "Prefixed alternative numeric library" +optional = false +python-versions = "*" +files = [ {file = "prefixed-0.7.0-py2.py3-none-any.whl", hash = "sha256:537b0e4ff4516c4578f277a41d7104f769d6935ae9cdb0f88fed82ec7b3c0ca5"}, {file = "prefixed-0.7.0.tar.gz", hash = "sha256:0b54d15e602eb8af4ac31b1db21a37ea95ce5890e0741bb0dd9ded493cefbbe9"}, ] -prison = [ + +[[package]] +name = "prison" +version = "0.2.1" +description = "Rison encoder/decoder" +optional = false +python-versions = "*" +files = [ {file = "prison-0.2.1-py2.py3-none-any.whl", hash = "sha256:f90bab63fca497aa0819a852f64fb21a4e181ed9f6114deaa5dc04001a7555c5"}, {file = "prison-0.2.1.tar.gz", hash = "sha256:e6cd724044afcb1a8a69340cad2f1e3151a5839fd3a8027fd1357571e797c599"}, ] -proto-plus = [ + +[package.dependencies] +six = "*" + +[package.extras] +dev = ["nose", "pipreqs", "twine"] + +[[package]] +name = "proto-plus" +version = "1.22.3" +description = "Beautiful, Pythonic protocol buffers." 
+optional = true +python-versions = ">=3.6" +files = [ {file = "proto-plus-1.22.3.tar.gz", hash = "sha256:fdcd09713cbd42480740d2fe29c990f7fbd885a67efc328aa8be6ee3e9f76a6b"}, {file = "proto_plus-1.22.3-py3-none-any.whl", hash = "sha256:a49cd903bc0b6ab41f76bf65510439d56ca76f868adf0274e738bfdd096894df"}, ] -protobuf = [ + +[package.dependencies] +protobuf = ">=3.19.0,<5.0.0dev" + +[package.extras] +testing = ["google-api-core[grpc] (>=1.31.5)"] + +[[package]] +name = "protobuf" +version = "4.24.2" +description = "" +optional = false +python-versions = ">=3.7" +files = [ {file = "protobuf-4.24.2-cp310-abi3-win32.whl", hash = "sha256:58e12d2c1aa428ece2281cef09bbaa6938b083bcda606db3da4e02e991a0d924"}, {file = "protobuf-4.24.2-cp310-abi3-win_amd64.whl", hash = "sha256:77700b55ba41144fc64828e02afb41901b42497b8217b558e4a001f18a85f2e3"}, {file = "protobuf-4.24.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:237b9a50bd3b7307d0d834c1b0eb1a6cd47d3f4c2da840802cd03ea288ae8880"}, @@ -7980,7 +6195,14 @@ protobuf = [ {file = "protobuf-4.24.2-py3-none-any.whl", hash = "sha256:3b7b170d3491ceed33f723bbf2d5a260f8a4e23843799a3906f16ef736ef251e"}, {file = "protobuf-4.24.2.tar.gz", hash = "sha256:7fda70797ddec31ddfa3576cbdcc3ddbb6b3078b737a1a87ab9136af0570cd6e"}, ] -psutil = [ + +[[package]] +name = "psutil" +version = "5.9.5" +description = "Cross-platform lib for process and system monitoring in Python." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ {file = "psutil-5.9.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:be8929ce4313f9f8146caad4272f6abb8bf99fc6cf59344a3167ecd74f4f203f"}, {file = "psutil-5.9.5-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ab8ed1a1d77c95453db1ae00a3f9c50227ebd955437bcf2a574ba8adbf6a74d5"}, {file = "psutil-5.9.5-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:4aef137f3345082a3d3232187aeb4ac4ef959ba3d7c10c33dd73763fbc063da4"}, @@ -7996,7 +6218,17 @@ psutil = [ {file = "psutil-5.9.5-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:c607bb3b57dc779d55e1554846352b4e358c10fff3abf3514a7a6601beebdb30"}, {file = "psutil-5.9.5.tar.gz", hash = "sha256:5410638e4df39c54d957fc51ce03048acd8e6d60abc0f5107af51e5fb566eb3c"}, ] -psycopg2-binary = [ + +[package.extras] +test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] + +[[package]] +name = "psycopg2-binary" +version = "2.9.7" +description = "psycopg2 - Python-PostgreSQL Database Adapter" +optional = true +python-versions = ">=3.6" +files = [ {file = "psycopg2-binary-2.9.7.tar.gz", hash = "sha256:1b918f64a51ffe19cd2e230b3240ba481330ce1d4b7875ae67305bd1d37b041c"}, {file = "psycopg2_binary-2.9.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ea5f8ee87f1eddc818fc04649d952c526db4426d26bab16efbe5a0c52b27d6ab"}, {file = "psycopg2_binary-2.9.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2993ccb2b7e80844d534e55e0f12534c2871952f78e0da33c35e648bf002bbff"}, @@ -8058,14 +6290,39 @@ psycopg2-binary = [ {file = "psycopg2_binary-2.9.7-cp39-cp39-win32.whl", hash = "sha256:18f12632ab516c47c1ac4841a78fddea6508a8284c7cf0f292cb1a523f2e2379"}, {file = "psycopg2_binary-2.9.7-cp39-cp39-win_amd64.whl", hash = "sha256:eb3b8d55924a6058a26db69fb1d3e7e32695ff8b491835ba9f479537e14dcf9f"}, ] -psycopg2cffi = [ + +[[package]] +name = "psycopg2cffi" +version = "2.9.0" +description = ".. 
image:: https://travis-ci.org/chtd/psycopg2cffi.svg?branch=master" +optional = true +python-versions = "*" +files = [ {file = "psycopg2cffi-2.9.0.tar.gz", hash = "sha256:7e272edcd837de3a1d12b62185eb85c45a19feda9e62fa1b120c54f9e8d35c52"}, ] -py = [ + +[package.dependencies] +cffi = ">=1.0" +six = "*" + +[[package]] +name = "py" +version = "1.11.0" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, ] -pyarrow = [ + +[[package]] +name = "pyarrow" +version = "14.0.1" +description = "Python library for Apache Arrow" +optional = true +python-versions = ">=3.8" +files = [ {file = "pyarrow-14.0.1-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:96d64e5ba7dceb519a955e5eeb5c9adcfd63f73a56aea4722e2cc81364fc567a"}, {file = "pyarrow-14.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1a8ae88c0038d1bc362a682320112ee6774f006134cd5afc291591ee4bc06505"}, {file = "pyarrow-14.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f6f053cb66dc24091f5511e5920e45c83107f954a21032feadc7b9e3a8e7851"}, @@ -8103,31 +6360,106 @@ pyarrow = [ {file = "pyarrow-14.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:3f6d5faf4f1b0d5a7f97be987cf9e9f8cd39902611e818fe134588ee99bf0283"}, {file = "pyarrow-14.0.1.tar.gz", hash = "sha256:b8b3f4fe8d4ec15e1ef9b599b94683c5216adaed78d5cb4c606180546d1e2ee1"}, ] -pyasn1 = [ + +[package.dependencies] +numpy = ">=1.16.6" + +[[package]] +name = "pyasn1" +version = "0.5.0" +description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +files = [ {file = "pyasn1-0.5.0-py2.py3-none-any.whl", hash = "sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57"}, {file = "pyasn1-0.5.0.tar.gz", hash = "sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde"}, ] -pyasn1-modules = [ + +[[package]] +name = "pyasn1-modules" +version = "0.3.0" +description = "A collection of ASN.1-based protocols modules" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +files = [ {file = "pyasn1_modules-0.3.0-py2.py3-none-any.whl", hash = "sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d"}, {file = "pyasn1_modules-0.3.0.tar.gz", hash = "sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c"}, ] -pyathena = [ + +[package.dependencies] +pyasn1 = ">=0.4.6,<0.6.0" + +[[package]] +name = "pyathena" +version = "3.0.6" +description = "Python DB API 2.0 (PEP 249) client for Amazon Athena" +optional = true +python-versions = ">=3.8.1" +files = [ {file = "pyathena-3.0.6-py3-none-any.whl", hash = "sha256:27fb606a73644e62be8ef9b86cdf583ab3cb9f8cac9c2ad8f05b7ad6d4eaaa87"}, {file = "pyathena-3.0.6.tar.gz", hash = "sha256:ee6ea175134894209af2c6be1859b7be4371f7741faa7a58f9f97905ff6a73a4"}, ] -pycodestyle = [ + +[package.dependencies] +boto3 = ">=1.26.4" +botocore = ">=1.29.4" +fsspec = "*" +tenacity = ">=4.1.0" + +[package.extras] +arrow = ["pyarrow (>=7.0.0)"] +fastparquet = ["fastparquet (>=0.4.0)"] +pandas = ["pandas (>=1.3.0)"] +sqlalchemy = ["sqlalchemy (>=1.0.0)"] + +[[package]] +name = 
"pycodestyle" +version = "2.9.1" +description = "Python style guide checker" +optional = false +python-versions = ">=3.6" +files = [ {file = "pycodestyle-2.9.1-py2.py3-none-any.whl", hash = "sha256:d1735fc58b418fd7c5f658d28d943854f8a849b01a5d0a1e6f3f3fdd0166804b"}, {file = "pycodestyle-2.9.1.tar.gz", hash = "sha256:2c9607871d58c76354b697b42f5d57e1ada7d261c261efac224b664affdc5785"}, ] -pycparser = [ + +[[package]] +name = "pycparser" +version = "2.21" +description = "C parser in Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, ] -pydantic = [ + +[[package]] +name = "pydantic" +version = "2.5.0" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.7" +files = [ {file = "pydantic-2.5.0-py3-none-any.whl", hash = "sha256:7ce6e766c456ad026fe5712f7bcf036efc34bd5d107b3e669ef7ea01b3a9050c"}, {file = "pydantic-2.5.0.tar.gz", hash = "sha256:69bd6fb62d2d04b7055f59a396993486a2ee586c43a0b89231ce0000de07627c"}, ] -pydantic-core = [ + +[package.dependencies] +annotated-types = ">=0.4.0" +pydantic-core = "2.14.1" +typing-extensions = ">=4.6.1" + +[package.extras] +email = ["email-validator (>=2.0.0)"] + +[[package]] +name = "pydantic-core" +version = "2.14.1" +description = "" +optional = false +python-versions = ">=3.7" +files = [ {file = "pydantic_core-2.14.1-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:812beca1dcb2b722cccc7e9c620bd972cbc323321194ec2725eab3222e6ac573"}, {file = "pydantic_core-2.14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a2ccdc53cb88e51c7d47d74c59630d7be844428f6b8d463055ffad6f0392d8da"}, {file = "pydantic_core-2.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd937733bf2fe7d6a8bf208c12741f1f730b7bf5636033877767a75093c29b8a"}, @@ -8230,23 +6562,89 @@ pydantic-core = [ {file = "pydantic_core-2.14.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d983222223f63e323a5f497f5b85e211557a5d8fb670dc88f343784502b466ba"}, {file = "pydantic_core-2.14.1.tar.gz", hash = "sha256:0d82a6ee815388a362885186e431fac84c7a06623bc136f508e9f88261d8cadb"}, ] -pydoc-markdown = [ + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + +[[package]] +name = "pydoc-markdown" +version = "4.8.2" +description = "Create Python API documentation in Markdown format." 
+optional = false +python-versions = ">=3.7,<4.0" +files = [ {file = "pydoc_markdown-4.8.2-py3-none-any.whl", hash = "sha256:203f74119e6bb2f9deba43d452422de7c8ec31955b61e0620fa4dd8c2611715f"}, {file = "pydoc_markdown-4.8.2.tar.gz", hash = "sha256:fb6c927e31386de17472d42f9bd3d3be2905977d026f6216881c65145aa67f0b"}, ] -pyflakes = [ + +[package.dependencies] +click = ">=7.1,<9.0" +"databind.core" = ">=4.4.0,<5.0.0" +"databind.json" = ">=4.4.0,<5.0.0" +docspec = ">=2.2.1,<3.0.0" +docspec-python = ">=2.2.1,<3.0.0" +docstring-parser = ">=0.11,<0.12" +jinja2 = ">=3.0.0,<4.0.0" +"nr.util" = ">=0.7.5,<1.0.0" +PyYAML = ">=5.0,<7.0" +requests = ">=2.23.0,<3.0.0" +tomli = ">=2.0.0,<3.0.0" +tomli_w = ">=1.0.0,<2.0.0" +watchdog = "*" +yapf = ">=0.30.0" + +[[package]] +name = "pyflakes" +version = "2.5.0" +description = "passive checker of Python programs" +optional = false +python-versions = ">=3.6" +files = [ {file = "pyflakes-2.5.0-py2.py3-none-any.whl", hash = "sha256:4579f67d887f804e67edb544428f264b7b24f435b263c4614f384135cea553d2"}, {file = "pyflakes-2.5.0.tar.gz", hash = "sha256:491feb020dca48ccc562a8c0cbe8df07ee13078df59813b83959cbdada312ea3"}, ] -pygments = [ + +[[package]] +name = "pygments" +version = "2.16.1" +description = "Pygments is a syntax highlighting package written in Python." +optional = false +python-versions = ">=3.7" +files = [ {file = "Pygments-2.16.1-py3-none-any.whl", hash = "sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692"}, {file = "Pygments-2.16.1.tar.gz", hash = "sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29"}, ] -pyjwt = [ + +[package.extras] +plugins = ["importlib-metadata"] + +[[package]] +name = "pyjwt" +version = "2.8.0" +description = "JSON Web Token implementation in Python" +optional = false +python-versions = ">=3.7" +files = [ {file = "PyJWT-2.8.0-py3-none-any.whl", hash = "sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320"}, {file = "PyJWT-2.8.0.tar.gz", hash = "sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de"}, ] -pymongo = [ + +[package.dependencies] +cryptography = {version = ">=3.4.0", optional = true, markers = "extra == \"crypto\""} + +[package.extras] +crypto = ["cryptography (>=3.4.0)"] +dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] +docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] +tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] + +[[package]] +name = "pymongo" +version = "4.6.0" +description = "Python driver for MongoDB " +optional = false +python-versions = ">=3.7" +files = [ {file = "pymongo-4.6.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:c011bd5ad03cc096f99ffcfdd18a1817354132c1331bed7a837a25226659845f"}, {file = "pymongo-4.6.0-cp310-cp310-manylinux1_i686.whl", hash = "sha256:5e63146dbdb1eac207464f6e0cfcdb640c9c5ff0f57b754fa96fe252314a1dc6"}, {file = "pymongo-4.6.0-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:2972dd1f1285866aba027eff2f4a2bbf8aa98563c2ced14cb34ee5602b36afdf"}, @@ -8277,6 +6675,7 @@ pymongo = [ {file = "pymongo-4.6.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ab6bcc8e424e07c1d4ba6df96f7fb963bcb48f590b9456de9ebd03b88084fe8"}, {file = "pymongo-4.6.0-cp312-cp312-win32.whl", hash = "sha256:47aa128be2e66abd9d1a9b0437c62499d812d291f17b55185cb4aa33a5f710a4"}, {file = 
"pymongo-4.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:014e7049dd019a6663747ca7dae328943e14f7261f7c1381045dfc26a04fa330"}, + {file = "pymongo-4.6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e24025625bad66895b1bc3ae1647f48f0a92dd014108fb1be404c77f0b69ca67"}, {file = "pymongo-4.6.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:288c21ab9531b037f7efa4e467b33176bc73a0c27223c141b822ab4a0e66ff2a"}, {file = "pymongo-4.6.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:747c84f4e690fbe6999c90ac97246c95d31460d890510e4a3fa61b7d2b87aa34"}, {file = "pymongo-4.6.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:055f5c266e2767a88bb585d01137d9c7f778b0195d3dbf4a487ef0638be9b651"}, @@ -8329,11 +6728,41 @@ pymongo = [ {file = "pymongo-4.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:107a234dc55affc5802acb3b6d83cbb8c87355b38a9457fcd8806bdeb8bce161"}, {file = "pymongo-4.6.0.tar.gz", hash = "sha256:fb1c56d891f9e34303c451998ef62ba52659648bb0d75b03c5e4ac223a3342c2"}, ] -pymysql = [ + +[package.dependencies] +dnspython = ">=1.16.0,<3.0.0" + +[package.extras] +aws = ["pymongo-auth-aws (<2.0.0)"] +encryption = ["certifi", "pymongo[aws]", "pymongocrypt (>=1.6.0,<2.0.0)"] +gssapi = ["pykerberos", "winkerberos (>=0.5.0)"] +ocsp = ["certifi", "cryptography (>=2.5)", "pyopenssl (>=17.2.0)", "requests (<3.0.0)", "service-identity (>=18.1.0)"] +snappy = ["python-snappy"] +test = ["pytest (>=7)"] +zstd = ["zstandard"] + +[[package]] +name = "pymysql" +version = "1.1.0" +description = "Pure Python MySQL Driver" +optional = false +python-versions = ">=3.7" +files = [ {file = "PyMySQL-1.1.0-py3-none-any.whl", hash = "sha256:8969ec6d763c856f7073c4c64662882675702efcb114b4bcbb955aea3a069fa7"}, {file = "PyMySQL-1.1.0.tar.gz", hash = "sha256:4f13a7df8bf36a51e81dd9f3605fede45a4878fe02f9236349fd82a3f0612f96"}, ] -pyodbc = [ + +[package.extras] +ed25519 = ["PyNaCl (>=1.4.0)"] +rsa = ["cryptography"] + +[[package]] +name = "pyodbc" +version = "4.0.39" +description = "DB API Module for ODBC" +optional = true +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ {file = "pyodbc-4.0.39-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:74af348dbaee4885998858daf50c8964e767629ecf6c195868b016367b0bb861"}, {file = "pyodbc-4.0.39-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0f5901b57eaef0761f4cf02bca8e7c63f589fd0fd723a79f6ccf1ea1275372e5"}, {file = "pyodbc-4.0.39-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0db69478d00fcd8d0b9bdde8aca0b0eada341fd6ed8c2da84b594b928c84106"}, @@ -8370,70 +6799,267 @@ pyodbc = [ {file = "pyodbc-4.0.39-cp39-cp39-win_amd64.whl", hash = "sha256:305c7d6337e2d4c8350677cc641b343fc0197b7b9bc167815c66b64545c67a53"}, {file = "pyodbc-4.0.39.tar.gz", hash = "sha256:e528bb70dd6d6299ee429868925df0866e3e919c772b9eff79c8e17920d8f116"}, ] -pyopenssl = [ + +[[package]] +name = "pyopenssl" +version = "23.2.0" +description = "Python wrapper module around the OpenSSL library" +optional = true +python-versions = ">=3.6" +files = [ {file = "pyOpenSSL-23.2.0-py3-none-any.whl", hash = "sha256:24f0dc5227396b3e831f4c7f602b950a5e9833d292c8e4a2e06b709292806ae2"}, {file = "pyOpenSSL-23.2.0.tar.gz", hash = "sha256:276f931f55a452e7dea69c7173e984eb2a4407ce413c918aa34b55f82f9b8bac"}, ] -pyparsing = [ + +[package.dependencies] +cryptography = ">=38.0.0,<40.0.0 || >40.0.0,<40.0.1 || >40.0.1,<42" + +[package.extras] +docs = ["sphinx (!=5.2.0,!=5.2.0.post0)", "sphinx-rtd-theme"] +test = ["flaky", "pretend", "pytest (>=3.0.1)"] + +[[package]] +name = 
"pyparsing" +version = "3.1.1" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" +optional = false +python-versions = ">=3.6.8" +files = [ {file = "pyparsing-3.1.1-py3-none-any.whl", hash = "sha256:32c7c0b711493c72ff18a981d24f28aaf9c1fb7ed5e9667c9e84e3db623bdbfb"}, {file = "pyparsing-3.1.1.tar.gz", hash = "sha256:ede28a1a32462f5a9705e07aea48001a08f7cf81a021585011deba701581a0db"}, ] -pypdf2 = [ + +[package.extras] +diagrams = ["jinja2", "railroad-diagrams"] + +[[package]] +name = "pypdf2" +version = "3.0.1" +description = "A pure-python PDF library capable of splitting, merging, cropping, and transforming PDF files" +optional = false +python-versions = ">=3.6" +files = [ {file = "PyPDF2-3.0.1.tar.gz", hash = "sha256:a74408f69ba6271f71b9352ef4ed03dc53a31aa404d29b5d31f53bfecfee1440"}, {file = "pypdf2-3.0.1-py3-none-any.whl", hash = "sha256:d16e4205cfee272fbdc0568b68d82be796540b1537508cef59388f839c191928"}, ] -pyreadline3 = [ + +[package.dependencies] +typing_extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} + +[package.extras] +crypto = ["PyCryptodome"] +dev = ["black", "flit", "pip-tools", "pre-commit (<2.18.0)", "pytest-cov", "wheel"] +docs = ["myst_parser", "sphinx", "sphinx_rtd_theme"] +full = ["Pillow", "PyCryptodome"] +image = ["Pillow"] + +[[package]] +name = "pyreadline3" +version = "3.4.1" +description = "A python implementation of GNU readline." +optional = true +python-versions = "*" +files = [ {file = "pyreadline3-3.4.1-py3-none-any.whl", hash = "sha256:b0efb6516fd4fb07b45949053826a62fa4cb353db5be2bbb4a7aa1fdd1e345fb"}, {file = "pyreadline3-3.4.1.tar.gz", hash = "sha256:6f3d1f7b8a31ba32b73917cefc1f28cc660562f39aea8646d30bd6eff21f7bae"}, ] -pytest = [ + +[[package]] +name = "pytest" +version = "7.4.4" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.7" +files = [ {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, ] -pytest-asyncio = [ + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-asyncio" +version = "0.23.5" +description = "Pytest support for asyncio" +optional = false +python-versions = ">=3.8" +files = [ {file = "pytest-asyncio-0.23.5.tar.gz", hash = "sha256:3a048872a9c4ba14c3e90cc1aa20cbc2def7d01c7c8db3777ec281ba9c057675"}, {file = "pytest_asyncio-0.23.5-py3-none-any.whl", hash = "sha256:4e7093259ba018d58ede7d5315131d21923a60f8a6e9ee266ce1589685c89eac"}, ] -pytest-cases = [ + +[package.dependencies] +pytest = ">=7.0.0,<9" + +[package.extras] +docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] +testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] + +[[package]] +name = "pytest-cases" +version = "3.6.14" +description = "Separate test code from test cases in pytest." 
+optional = false +python-versions = "*" +files = [ {file = "pytest-cases-3.6.14.tar.gz", hash = "sha256:7455e6ca57a544c1bfdd8b56ace08c1c1ce4c6572a8aab8f1bd351dc25a10b6b"}, {file = "pytest_cases-3.6.14-py2.py3-none-any.whl", hash = "sha256:a087f3d019efd8942d0f0dc3fb526bedf9f83d742c40289e9623f6788aff7257"}, ] -pytest-console-scripts = [ + +[package.dependencies] +decopatch = "*" +makefun = ">=1.9.5" + +[[package]] +name = "pytest-console-scripts" +version = "1.4.1" +description = "Pytest plugin for testing console scripts" +optional = false +python-versions = ">=3.8" +files = [ {file = "pytest-console-scripts-1.4.1.tar.gz", hash = "sha256:5a826ed84cc0afa202eb9e44381d7d762f7bdda8e0c23f9f79a7f1f44cf4a895"}, {file = "pytest_console_scripts-1.4.1-py3-none-any.whl", hash = "sha256:ad860a951a90eca4bd3bd1159b8f5428633ba4ea01abd5c9526b67a95f65437a"}, ] -pytest-forked = [ + +[package.dependencies] +importlib-metadata = {version = ">=3.6", markers = "python_version < \"3.10\""} +pytest = ">=4.0.0" + +[[package]] +name = "pytest-forked" +version = "1.6.0" +description = "run tests in isolated forked subprocesses" +optional = false +python-versions = ">=3.7" +files = [ {file = "pytest-forked-1.6.0.tar.gz", hash = "sha256:4dafd46a9a600f65d822b8f605133ecf5b3e1941ebb3588e943b4e3eb71a5a3f"}, {file = "pytest_forked-1.6.0-py3-none-any.whl", hash = "sha256:810958f66a91afb1a1e2ae83089d8dc1cd2437ac96b12963042fbb9fb4d16af0"}, ] -pytest-order = [ + +[package.dependencies] +py = "*" +pytest = ">=3.10" + +[[package]] +name = "pytest-order" +version = "1.1.0" +description = "pytest plugin to run your tests in a specific order" +optional = false +python-versions = ">=3.6" +files = [ {file = "pytest-order-1.1.0.tar.gz", hash = "sha256:139d25b30826b78eebb42722f747eab14c44b88059d7a71d4f79d14a057269a5"}, {file = "pytest_order-1.1.0-py3-none-any.whl", hash = "sha256:3b3730969c97900fa5cd31ecff80847680ed56b2490954565c14949ba60d9371"}, ] -python-daemon = [ + +[package.dependencies] +pytest = [ + {version = ">=5.0", markers = "python_version < \"3.10\""}, + {version = ">=6.2.4", markers = "python_version >= \"3.10\""}, +] + +[[package]] +name = "python-daemon" +version = "3.0.1" +description = "Library to implement a well-behaved Unix daemon process." 
+optional = false +python-versions = ">=3" +files = [ {file = "python-daemon-3.0.1.tar.gz", hash = "sha256:6c57452372f7eaff40934a1c03ad1826bf5e793558e87fef49131e6464b4dae5"}, {file = "python_daemon-3.0.1-py3-none-any.whl", hash = "sha256:42bb848a3260a027fa71ad47ecd959e471327cb34da5965962edd5926229f341"}, ] -python-dateutil = [ + +[package.dependencies] +docutils = "*" +lockfile = ">=0.10" +setuptools = ">=62.4.0" + +[package.extras] +devel = ["coverage", "docutils", "isort", "testscenarios (>=0.4)", "testtools", "twine"] +test = ["coverage", "docutils", "testscenarios (>=0.4)", "testtools"] + +[[package]] +name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, ] -python-nvd3 = [ + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "python-nvd3" +version = "0.15.0" +description = "Python NVD3 - Chart Library for d3.js" +optional = false +python-versions = "*" +files = [ {file = "python-nvd3-0.15.0.tar.gz", hash = "sha256:fbd75ff47e0ef255b4aa4f3a8b10dc8b4024aa5a9a7abed5b2406bd3cb817715"}, ] -python-slugify = [ + +[package.dependencies] +Jinja2 = ">=2.8" +python-slugify = ">=1.2.5" + +[[package]] +name = "python-slugify" +version = "8.0.1" +description = "A Python slugify application that also handles Unicode" +optional = false +python-versions = ">=3.7" +files = [ {file = "python-slugify-8.0.1.tar.gz", hash = "sha256:ce0d46ddb668b3be82f4ed5e503dbc33dd815d83e2eb6824211310d3fb172a27"}, {file = "python_slugify-8.0.1-py2.py3-none-any.whl", hash = "sha256:70ca6ea68fe63ecc8fa4fcf00ae651fc8a5d02d93dcd12ae6d4fc7ca46c4d395"}, ] -pytimeparse = [ + +[package.dependencies] +text-unidecode = ">=1.3" + +[package.extras] +unidecode = ["Unidecode (>=1.1.1)"] + +[[package]] +name = "pytimeparse" +version = "1.1.8" +description = "Time expression parser" +optional = false +python-versions = "*" +files = [ {file = "pytimeparse-1.1.8-py2.py3-none-any.whl", hash = "sha256:04b7be6cc8bd9f5647a6325444926c3ac34ee6bc7e69da4367ba282f076036bd"}, {file = "pytimeparse-1.1.8.tar.gz", hash = "sha256:e86136477be924d7e670646a98561957e8ca7308d44841e21f5ddea757556a0a"}, ] -pytz = [ + +[[package]] +name = "pytz" +version = "2023.3" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +files = [ {file = "pytz-2023.3-py2.py3-none-any.whl", hash = "sha256:a151b3abb88eda1d4e34a9814df37de2a80e301e68ba0fd856fb9b46bfbbbffb"}, {file = "pytz-2023.3.tar.gz", hash = "sha256:1d8ce29db189191fb55338ee6d0387d82ab59f3d00eac103412d64e0ebd0c588"}, ] -pywin32 = [ + +[[package]] +name = "pywin32" +version = "306" +description = "Python for Window Extensions" +optional = true +python-versions = "*" +files = [ {file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"}, {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"}, {file = "pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"}, @@ -8449,16 +7075,31 @@ pywin32 = [ {file = "pywin32-306-cp39-cp39-win32.whl", hash = 
"sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"}, {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, ] -pywin32-ctypes = [ + +[[package]] +name = "pywin32-ctypes" +version = "0.2.2" +description = "A (partial) reimplementation of pywin32 using ctypes/cffi" +optional = true +python-versions = ">=3.6" +files = [ {file = "pywin32-ctypes-0.2.2.tar.gz", hash = "sha256:3426e063bdd5fd4df74a14fa3cf80a0b42845a87e1d1e81f6549f9daec593a60"}, {file = "pywin32_ctypes-0.2.2-py3-none-any.whl", hash = "sha256:bf490a1a709baf35d688fe0ecf980ed4de11d2b3e37b51e5442587a75d9957e7"}, ] -pyyaml = [ + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -8466,6 +7107,7 @@ pyyaml = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, @@ -8490,6 +7132,7 @@ pyyaml = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -8497,22 +7140,85 @@ pyyaml = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, ] -qdrant-client = [ + +[[package]] +name = "qdrant-client" +version = "1.6.4" +description = "Client library for the Qdrant vector search engine" +optional = true +python-versions = ">=3.8,<3.13" +files = [ {file = "qdrant_client-1.6.4-py3-none-any.whl", hash = "sha256:db4696978d6a62d78ff60f70b912383f1e467bda3053f732b01ddb5f93281b10"}, {file = "qdrant_client-1.6.4.tar.gz", hash = "sha256:bbd65f383b6a55a9ccf4e301250fa925179340dd90cfde9b93ce4230fd68867b"}, ] -redshift-connector = [ + +[package.dependencies] +fastembed = {version = "0.1.1", optional = true, markers = "python_version < \"3.12\" and extra == \"fastembed\""} +grpcio = ">=1.41.0" +grpcio-tools = ">=1.41.0" +httpx = {version = ">=0.14.0", extras = ["http2"]} +numpy = [ + {version = ">=1.21", markers = "python_version >= \"3.8\" and python_version < \"3.12\""}, + {version = ">=1.26", markers = "python_version >= \"3.12\""}, +] +portalocker = ">=2.7.0,<3.0.0" +pydantic = ">=1.10.8" +urllib3 = ">=1.26.14,<2.0.0" + +[package.extras] +fastembed = ["fastembed (==0.1.1)"] + +[[package]] +name = "redshift-connector" +version = "2.0.915" +description = "Redshift interface library" +optional = true +python-versions = ">=3.6" +files = [ {file = "redshift_connector-2.0.915-py3-none-any.whl", hash = "sha256:d02e8d6fa01dd46504c879953f6abd7fa72980edd1e6a80202448fe35fb4c9e4"}, ] -referencing = [ + +[package.dependencies] +beautifulsoup4 = ">=4.7.0,<5.0.0" +boto3 = ">=1.9.201,<2.0.0" +botocore = ">=1.12.201,<2.0.0" +lxml = ">=4.6.5" +packaging = "*" +pytz = ">=2020.1" +requests = ">=2.23.0,<3.0.0" +scramp = ">=1.2.0,<1.5.0" +setuptools = "*" + +[package.extras] +full = ["numpy", "pandas"] + +[[package]] +name = "referencing" +version = "0.30.2" +description 
= "JSON Referencing + Python" +optional = false +python-versions = ">=3.8" +files = [ {file = "referencing-0.30.2-py3-none-any.whl", hash = "sha256:449b6669b6121a9e96a7f9e410b245d471e8d48964c67113ce9afe50c8dd7bdf"}, {file = "referencing-0.30.2.tar.gz", hash = "sha256:794ad8003c65938edcdbc027f1933215e0d0ccc0291e3ce20a4d87432b59efc0"}, ] -regex = [ + +[package.dependencies] +attrs = ">=22.2.0" +rpds-py = ">=0.7.0" + +[[package]] +name = "regex" +version = "2023.12.25" +description = "Alternative regular expression module, to replace re." +optional = false +python-versions = ">=3.7" +files = [ {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0694219a1d54336fd0445ea382d49d36882415c0134ee1e8332afd1529f0baa5"}, {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b014333bd0217ad3d54c143de9d4b9a3ca1c5a29a6d0d554952ea071cff0f1f8"}, {file = "regex-2023.12.25-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d865984b3f71f6d0af64d0d88f5733521698f6c16f445bb09ce746c92c97c586"}, @@ -8607,39 +7313,147 @@ regex = [ {file = "regex-2023.12.25-cp39-cp39-win_amd64.whl", hash = "sha256:e693e233ac92ba83a87024e1d32b5f9ab15ca55ddd916d878146f4e3406b5c91"}, {file = "regex-2023.12.25.tar.gz", hash = "sha256:29171aa128da69afdf4bde412d5bedc335f2ca8fcfe4489038577d05f16181e5"}, ] -requests = [ + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.7" +files = [ {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, ] -requests-mock = [ + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-mock" +version = "1.11.0" +description = "Mock out responses from the requests package" +optional = false +python-versions = "*" +files = [ {file = "requests-mock-1.11.0.tar.gz", hash = "sha256:ef10b572b489a5f28e09b708697208c4a3b2b89ef80a9f01584340ea357ec3c4"}, {file = "requests_mock-1.11.0-py2.py3-none-any.whl", hash = "sha256:f7fae383f228633f6bececebdab236c478ace2284d6292c6e7e2867b9ab74d15"}, ] -requests-oauthlib = [ + +[package.dependencies] +requests = ">=2.3,<3" +six = "*" + +[package.extras] +fixture = ["fixtures"] +test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "testtools"] + +[[package]] +name = "requests-oauthlib" +version = "1.3.1" +description = "OAuthlib authentication support for Requests." 
+optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ {file = "requests-oauthlib-1.3.1.tar.gz", hash = "sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a"}, {file = "requests_oauthlib-1.3.1-py2.py3-none-any.whl", hash = "sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5"}, ] -requests-toolbelt = [ + +[package.dependencies] +oauthlib = ">=3.0.0" +requests = ">=2.0.0" + +[package.extras] +rsa = ["oauthlib[signedtoken] (>=3.0.0)"] + +[[package]] +name = "requests-toolbelt" +version = "1.0.0" +description = "A utility belt for advanced users of python-requests" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ {file = "requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6"}, {file = "requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06"}, ] -requirements-parser = [ + +[package.dependencies] +requests = ">=2.0.1,<3.0.0" + +[[package]] +name = "requirements-parser" +version = "0.5.0" +description = "This is a small Python module for parsing Pip requirement files." +optional = false +python-versions = ">=3.6,<4.0" +files = [ {file = "requirements-parser-0.5.0.tar.gz", hash = "sha256:3336f3a3ae23e06d3f0f88595e4052396e3adf91688787f637e5d2ca1a904069"}, {file = "requirements_parser-0.5.0-py3-none-any.whl", hash = "sha256:e7fcdcd04f2049e73a9fb150d8a0f9d51ce4108f5f7cbeac74c484e17b12bcd9"}, ] -rfc3339-validator = [ + +[package.dependencies] +types-setuptools = ">=57.0.0" + +[[package]] +name = "rfc3339-validator" +version = "0.1.4" +description = "A pure python RFC3339 validator" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ {file = "rfc3339_validator-0.1.4-py2.py3-none-any.whl", hash = "sha256:24f6ec1eda14ef823da9e36ec7113124b39c04d50a4d3d3a3c2859577e7791fa"}, {file = "rfc3339_validator-0.1.4.tar.gz", hash = "sha256:138a2abdf93304ad60530167e51d2dfb9549521a836871b88d7f4695d0022f6b"}, ] -rich = [ + +[package.dependencies] +six = "*" + +[[package]] +name = "rich" +version = "13.5.2" +description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +optional = false +python-versions = ">=3.7.0" +files = [ {file = "rich-13.5.2-py3-none-any.whl", hash = "sha256:146a90b3b6b47cac4a73c12866a499e9817426423f57c5a66949c086191a8808"}, {file = "rich-13.5.2.tar.gz", hash = "sha256:fb9d6c0a0f643c99eed3875b5377a184132ba9be4d61516a55273d3554d75a39"}, ] -rich-argparse = [ + +[package.dependencies] +markdown-it-py = ">=2.2.0" +pygments = ">=2.13.0,<3.0.0" +typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.9\""} + +[package.extras] +jupyter = ["ipywidgets (>=7.5.1,<9)"] + +[[package]] +name = "rich-argparse" +version = "1.3.0" +description = "Rich help formatters for argparse and optparse" +optional = false +python-versions = ">=3.7" +files = [ {file = "rich_argparse-1.3.0-py3-none-any.whl", hash = "sha256:1a5eda1659c0a215862fe3630fcbe68d7792f18a8106baaf4e005b9896acc6f6"}, {file = "rich_argparse-1.3.0.tar.gz", hash = "sha256:974cc1ba0aaa0d6aabc09ab1b78f9ba928670e08590f9551121bcbc60c75b74a"}, ] -rpds-py = [ + +[package.dependencies] +rich = ">=11.0.0" + +[[package]] +name = "rpds-py" +version = "0.10.0" +description = "Python bindings to Rust's persistent data structures (rpds)" +optional = false +python-versions = ">=3.8" +files = [ {file = 
"rpds_py-0.10.0-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:c1e0e9916301e3b3d970814b1439ca59487f0616d30f36a44cead66ee1748c31"}, {file = "rpds_py-0.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8ce8caa29ebbdcde67e5fd652c811d34bc01f249dbc0d61e5cc4db05ae79a83b"}, {file = "rpds_py-0.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad277f74b1c164f7248afa968700e410651eb858d7c160d109fb451dc45a2f09"}, @@ -8738,11 +7552,28 @@ rpds-py = [ {file = "rpds_py-0.10.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:872f3dcaa8bf2245944861d7311179d2c0c9b2aaa7d3b464d99a7c2e401f01fa"}, {file = "rpds_py-0.10.0.tar.gz", hash = "sha256:e36d7369363d2707d5f68950a64c4e025991eb0177db01ccb6aa6facae48b69f"}, ] -rsa = [ + +[[package]] +name = "rsa" +version = "4.9" +description = "Pure-Python RSA implementation" +optional = false +python-versions = ">=3.6,<4" +files = [ {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, ] -ruff = [ + +[package.dependencies] +pyasn1 = ">=0.1.3" + +[[package]] +name = "ruff" +version = "0.3.2" +description = "An extremely fast Python linter and code formatter, written in Rust." +optional = false +python-versions = ">=3.7" +files = [ {file = "ruff-0.3.2-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:77f2612752e25f730da7421ca5e3147b213dca4f9a0f7e0b534e9562c5441f01"}, {file = "ruff-0.3.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:9966b964b2dd1107797be9ca7195002b874424d1d5472097701ae8f43eadef5d"}, {file = "ruff-0.3.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b83d17ff166aa0659d1e1deaf9f2f14cbe387293a906de09bc4860717eb2e2da"}, @@ -8761,31 +7592,134 @@ ruff = [ {file = "ruff-0.3.2-py3-none-win_arm64.whl", hash = "sha256:5f65103b1d76e0d600cabd577b04179ff592064eaa451a70a81085930e907d0b"}, {file = "ruff-0.3.2.tar.gz", hash = "sha256:fa78ec9418eb1ca3db392811df3376b46471ae93792a81af2d1cbb0e5dcb5142"}, ] -s3fs = [ + +[[package]] +name = "s3fs" +version = "2024.2.0" +description = "Convenient Filesystem interface over S3" +optional = true +python-versions = ">= 3.8" +files = [ {file = "s3fs-2024.2.0-py3-none-any.whl", hash = "sha256:c140de37175c157cb662aa6ad7423365df732ac5f10ef5bf7b76078c6333a942"}, {file = "s3fs-2024.2.0.tar.gz", hash = "sha256:f8064f522ad088b56b043047c825734847c0269df19f2613c956d4c20de15b62"}, ] -s3transfer = [ + +[package.dependencies] +aiobotocore = ">=2.5.4,<3.0.0" +aiohttp = "<4.0.0a0 || >4.0.0a0,<4.0.0a1 || >4.0.0a1" +fsspec = "2024.2.0" + +[package.extras] +awscli = ["aiobotocore[awscli] (>=2.5.4,<3.0.0)"] +boto3 = ["aiobotocore[boto3] (>=2.5.4,<3.0.0)"] + +[[package]] +name = "s3transfer" +version = "0.10.0" +description = "An Amazon S3 Transfer Manager" +optional = true +python-versions = ">= 3.8" +files = [ {file = "s3transfer-0.10.0-py3-none-any.whl", hash = "sha256:3cdb40f5cfa6966e812209d0994f2a4709b561c88e90cf00c2696d2df4e56b2e"}, {file = "s3transfer-0.10.0.tar.gz", hash = "sha256:d0c8bbf672d5eebbe4e57945e23b972d963f07d82f661cabf678a5c88831595b"}, ] -scramp = [ + +[package.dependencies] +botocore = ">=1.33.2,<2.0a.0" + +[package.extras] +crt = ["botocore[crt] (>=1.33.2,<2.0a.0)"] + +[[package]] +name = "scramp" +version = "1.4.4" +description = "An implementation of the SCRAM protocol." 
+optional = true +python-versions = ">=3.7" +files = [ {file = "scramp-1.4.4-py3-none-any.whl", hash = "sha256:b142312df7c2977241d951318b7ee923d6b7a4f75ba0f05b621ece1ed616faa3"}, {file = "scramp-1.4.4.tar.gz", hash = "sha256:b7022a140040f33cf863ab2657917ed05287a807b917950489b89b9f685d59bc"}, ] -secretstorage = [ + +[package.dependencies] +asn1crypto = ">=1.5.1" + +[[package]] +name = "secretstorage" +version = "3.3.3" +description = "Python bindings to FreeDesktop.org Secret Service API" +optional = true +python-versions = ">=3.6" +files = [ {file = "SecretStorage-3.3.3-py3-none-any.whl", hash = "sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99"}, {file = "SecretStorage-3.3.3.tar.gz", hash = "sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77"}, ] -semver = [ + +[package.dependencies] +cryptography = ">=2.0" +jeepney = ">=0.6" + +[[package]] +name = "semver" +version = "3.0.1" +description = "Python helper for Semantic Versioning (https://semver.org)" +optional = false +python-versions = ">=3.7" +files = [ {file = "semver-3.0.1-py3-none-any.whl", hash = "sha256:2a23844ba1647362c7490fe3995a86e097bb590d16f0f32dfc383008f19e4cdf"}, {file = "semver-3.0.1.tar.gz", hash = "sha256:9ec78c5447883c67b97f98c3b6212796708191d22e4ad30f4570f840171cbce1"}, ] -sentry-sdk = [ + +[[package]] +name = "sentry-sdk" +version = "1.30.0" +description = "Python client for Sentry (https://sentry.io)" +optional = false +python-versions = "*" +files = [ {file = "sentry-sdk-1.30.0.tar.gz", hash = "sha256:7dc873b87e1faf4d00614afd1058bfa1522942f33daef8a59f90de8ed75cd10c"}, {file = "sentry_sdk-1.30.0-py2.py3-none-any.whl", hash = "sha256:2e53ad63f96bb9da6570ba2e755c267e529edcf58580a2c0d2a11ef26e1e678b"}, ] -setproctitle = [ + +[package.dependencies] +certifi = "*" +urllib3 = {version = ">=1.26.11", markers = "python_version >= \"3.6\""} + +[package.extras] +aiohttp = ["aiohttp (>=3.5)"] +arq = ["arq (>=0.23)"] +beam = ["apache-beam (>=2.12)"] +bottle = ["bottle (>=0.12.13)"] +celery = ["celery (>=3)"] +chalice = ["chalice (>=1.16.0)"] +django = ["django (>=1.8)"] +falcon = ["falcon (>=1.4)"] +fastapi = ["fastapi (>=0.79.0)"] +flask = ["blinker (>=1.1)", "flask (>=0.11)", "markupsafe"] +grpcio = ["grpcio (>=1.21.1)"] +httpx = ["httpx (>=0.16.0)"] +huey = ["huey (>=2)"] +loguru = ["loguru (>=0.5)"] +opentelemetry = ["opentelemetry-distro (>=0.35b0)"] +opentelemetry-experimental = ["opentelemetry-distro (>=0.40b0,<1.0)", "opentelemetry-instrumentation-aiohttp-client (>=0.40b0,<1.0)", "opentelemetry-instrumentation-django (>=0.40b0,<1.0)", "opentelemetry-instrumentation-fastapi (>=0.40b0,<1.0)", "opentelemetry-instrumentation-flask (>=0.40b0,<1.0)", "opentelemetry-instrumentation-requests (>=0.40b0,<1.0)", "opentelemetry-instrumentation-sqlite3 (>=0.40b0,<1.0)", "opentelemetry-instrumentation-urllib (>=0.40b0,<1.0)"] +pure-eval = ["asttokens", "executing", "pure-eval"] +pymongo = ["pymongo (>=3.1)"] +pyspark = ["pyspark (>=2.4.4)"] +quart = ["blinker (>=1.1)", "quart (>=0.16.1)"] +rq = ["rq (>=0.6)"] +sanic = ["sanic (>=0.8)"] +sqlalchemy = ["sqlalchemy (>=1.2)"] +starlette = ["starlette (>=0.19.1)"] +starlite = ["starlite (>=1.48)"] +tornado = ["tornado (>=5)"] + +[[package]] +name = "setproctitle" +version = "1.3.2" +description = "A Python module to customize the process title" +optional = false +python-versions = ">=3.7" +files = [ {file = "setproctitle-1.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = 
"sha256:288943dec88e178bb2fd868adf491197cc0fc8b6810416b1c6775e686bab87fe"}, {file = "setproctitle-1.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:630f6fe5e24a619ccf970c78e084319ee8be5be253ecc9b5b216b0f474f5ef18"}, {file = "setproctitle-1.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c877691b90026670e5a70adfbcc735460a9f4c274d35ec5e8a43ce3f8443005"}, @@ -8859,11 +7793,33 @@ setproctitle = [ {file = "setproctitle-1.3.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7aa0aac1711fadffc1d51e9d00a3bea61f68443d6ac0241a224e4d622489d665"}, {file = "setproctitle-1.3.2.tar.gz", hash = "sha256:b9fb97907c830d260fa0658ed58afd48a86b2b88aac521135c352ff7fd3477fd"}, ] -setuptools = [ + +[package.extras] +test = ["pytest"] + +[[package]] +name = "setuptools" +version = "68.1.2" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ {file = "setuptools-68.1.2-py3-none-any.whl", hash = "sha256:3d8083eed2d13afc9426f227b24fd1659489ec107c0e86cec2ffdde5c92e790b"}, {file = "setuptools-68.1.2.tar.gz", hash = "sha256:3d4dfa6d95f1b101d695a6160a7626e15583af71a5f52176efa5d39a054d475d"}, ] -simplejson = [ + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5,<=7.1.2)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "simplejson" +version = "3.19.1" +description = "Simple, fast, extensible JSON encoder/decoder for Python" +optional = false +python-versions = ">=2.5, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ {file = "simplejson-3.19.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:412e58997a30c5deb8cab5858b8e2e5b40ca007079f7010ee74565cc13d19665"}, {file = "simplejson-3.19.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:e765b1f47293dedf77946f0427e03ee45def2862edacd8868c6cf9ab97c8afbd"}, {file = "simplejson-3.19.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:3231100edee292da78948fa0a77dee4e5a94a0a60bcba9ed7a9dc77f4d4bb11e"}, @@ -8950,19 +7906,47 @@ simplejson = [ {file = "simplejson-3.19.1-py3-none-any.whl", hash = "sha256:4710806eb75e87919b858af0cba4ffedc01b463edc3982ded7b55143f39e41e1"}, {file = "simplejson-3.19.1.tar.gz", hash = "sha256:6277f60848a7d8319d27d2be767a7546bc965535b28070e310b3a9af90604a4c"}, ] -six = [ + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, {file = "six-1.16.0.tar.gz", hash = 
"sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] -smmap = [ + +[[package]] +name = "smmap" +version = "5.0.0" +description = "A pure Python implementation of a sliding window memory map manager" +optional = false +python-versions = ">=3.6" +files = [ {file = "smmap-5.0.0-py3-none-any.whl", hash = "sha256:2aba19d6a040e78d8b09de5c57e96207b09ed71d8e55ce0959eeee6c8e190d94"}, {file = "smmap-5.0.0.tar.gz", hash = "sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936"}, ] -sniffio = [ + +[[package]] +name = "sniffio" +version = "1.3.0" +description = "Sniff out which async library your code is running under" +optional = false +python-versions = ">=3.7" +files = [ {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, ] -snowflake-connector-python = [ + +[[package]] +name = "snowflake-connector-python" +version = "3.5.0" +description = "Snowflake Connector for Python" +optional = true +python-versions = ">=3.8" +files = [ {file = "snowflake-connector-python-3.5.0.tar.gz", hash = "sha256:654e4a1f68a491544bd8f7c5ab02eb8531df67c5f4309d5253bd204044f8a1b3"}, {file = "snowflake_connector_python-3.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a365fa4f23be27a4a46d04f73a48ccb1ddad5b9558f100ba592a49571c90a33c"}, {file = "snowflake_connector_python-3.5.0-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:5b648b8f32aa540e9adf14e84ea5d77a6c3c6cbc3cbcf172622a0b8db0e99384"}, @@ -8985,19 +7969,66 @@ snowflake-connector-python = [ {file = "snowflake_connector_python-3.5.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee97a8ac0aaf40a7b7420c8936a66d8d33376cd40498ac3d38efa7bb5712d14a"}, {file = "snowflake_connector_python-3.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:e8cd747e2719ba44dd2ce0e9b1e6f8b03485b2b335a352f3b45138b56fad5888"}, ] -sortedcontainers = [ + +[package.dependencies] +asn1crypto = ">0.24.0,<2.0.0" +certifi = ">=2017.4.17" +cffi = ">=1.9,<2.0.0" +charset-normalizer = ">=2,<4" +cryptography = ">=3.1.0,<42.0.0" +filelock = ">=3.5,<4" +idna = ">=2.5,<4" +keyring = {version = "<16.1.0 || >16.1.0,<25.0.0", optional = true, markers = "extra == \"secure-local-storage\""} +packaging = "*" +platformdirs = ">=2.6.0,<4.0.0" +pyjwt = "<3.0.0" +pyOpenSSL = ">=16.2.0,<24.0.0" +pytz = "*" +requests = "<3.0.0" +sortedcontainers = ">=2.4.0" +tomlkit = "*" +typing-extensions = ">=4.3,<5" +urllib3 = ">=1.21.1,<2.0.0" + +[package.extras] +development = ["Cython", "coverage", "more-itertools", "numpy (<1.27.0)", "pendulum (!=2.1.1)", "pexpect", "pytest (<7.5.0)", "pytest-cov", "pytest-rerunfailures", "pytest-timeout", "pytest-xdist", "pytzdata"] +pandas = ["pandas (>=1.0.0,<2.1.0)", "pyarrow"] +secure-local-storage = ["keyring (!=16.1.0,<25.0.0)"] + +[[package]] +name = "sortedcontainers" +version = "2.4.0" +description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set" +optional = true +python-versions = "*" +files = [ {file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"}, {file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"}, ] -soupsieve = [ + +[[package]] +name = "soupsieve" +version = "2.5" +description = "A modern CSS selector implementation for Beautiful Soup." 
+optional = true +python-versions = ">=3.8" +files = [ {file = "soupsieve-2.5-py3-none-any.whl", hash = "sha256:eaa337ff55a1579b6549dc679565eac1e3d000563bcb1c8ab0d0fefbc0c2cdc7"}, {file = "soupsieve-2.5.tar.gz", hash = "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690"}, ] -sqlalchemy = [ + +[[package]] +name = "sqlalchemy" +version = "1.4.49" +description = "Database Abstraction Library" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +files = [ {file = "SQLAlchemy-1.4.49-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:2e126cf98b7fd38f1e33c64484406b78e937b1a280e078ef558b95bf5b6895f6"}, {file = "SQLAlchemy-1.4.49-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:03db81b89fe7ef3857b4a00b63dedd632d6183d4ea5a31c5d8a92e000a41fc71"}, {file = "SQLAlchemy-1.4.49-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:95b9df9afd680b7a3b13b38adf6e3a38995da5e162cc7524ef08e3be4e5ed3e1"}, {file = "SQLAlchemy-1.4.49-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a63e43bf3f668c11bb0444ce6e809c1227b8f067ca1068898f3008a273f52b09"}, + {file = "SQLAlchemy-1.4.49-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca46de16650d143a928d10842939dab208e8d8c3a9a8757600cae9b7c579c5cd"}, {file = "SQLAlchemy-1.4.49-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f835c050ebaa4e48b18403bed2c0fda986525896efd76c245bdd4db995e51a4c"}, {file = "SQLAlchemy-1.4.49-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c21b172dfb22e0db303ff6419451f0cac891d2e911bb9fbf8003d717f1bcf91"}, {file = "SQLAlchemy-1.4.49-cp310-cp310-win32.whl", hash = "sha256:5fb1ebdfc8373b5a291485757bd6431de8d7ed42c27439f543c81f6c8febd729"}, @@ -9007,84 +8038,280 @@ sqlalchemy = [ {file = "SQLAlchemy-1.4.49-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5debe7d49b8acf1f3035317e63d9ec8d5e4d904c6e75a2a9246a119f5f2fdf3d"}, {file = "SQLAlchemy-1.4.49-cp311-cp311-win32.whl", hash = "sha256:82b08e82da3756765c2e75f327b9bf6b0f043c9c3925fb95fb51e1567fa4ee87"}, {file = "SQLAlchemy-1.4.49-cp311-cp311-win_amd64.whl", hash = "sha256:171e04eeb5d1c0d96a544caf982621a1711d078dbc5c96f11d6469169bd003f1"}, + {file = "SQLAlchemy-1.4.49-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f23755c384c2969ca2f7667a83f7c5648fcf8b62a3f2bbd883d805454964a800"}, + {file = "SQLAlchemy-1.4.49-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8396e896e08e37032e87e7fbf4a15f431aa878c286dc7f79e616c2feacdb366c"}, + {file = "SQLAlchemy-1.4.49-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66da9627cfcc43bbdebd47bfe0145bb662041472393c03b7802253993b6b7c90"}, + {file = "SQLAlchemy-1.4.49-cp312-cp312-win32.whl", hash = "sha256:9a06e046ffeb8a484279e54bda0a5abfd9675f594a2e38ef3133d7e4d75b6214"}, + {file = "SQLAlchemy-1.4.49-cp312-cp312-win_amd64.whl", hash = "sha256:7cf8b90ad84ad3a45098b1c9f56f2b161601e4670827d6b892ea0e884569bd1d"}, {file = "SQLAlchemy-1.4.49-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:36e58f8c4fe43984384e3fbe6341ac99b6b4e083de2fe838f0fdb91cebe9e9cb"}, {file = "SQLAlchemy-1.4.49-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:b31e67ff419013f99ad6f8fc73ee19ea31585e1e9fe773744c0f3ce58c039c30"}, + {file = "SQLAlchemy-1.4.49-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ebc22807a7e161c0d8f3da34018ab7c97ef6223578fcdd99b1d3e7ed1100a5db"}, {file = "SQLAlchemy-1.4.49-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c14b29d9e1529f99efd550cd04dbb6db6ba5d690abb96d52de2bff4ed518bc95"}, {file = "SQLAlchemy-1.4.49-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c40f3470e084d31247aea228aa1c39bbc0904c2b9ccbf5d3cfa2ea2dac06f26d"}, {file = "SQLAlchemy-1.4.49-cp36-cp36m-win32.whl", hash = "sha256:706bfa02157b97c136547c406f263e4c6274a7b061b3eb9742915dd774bbc264"}, {file = "SQLAlchemy-1.4.49-cp36-cp36m-win_amd64.whl", hash = "sha256:a7f7b5c07ae5c0cfd24c2db86071fb2a3d947da7bd487e359cc91e67ac1c6d2e"}, {file = "SQLAlchemy-1.4.49-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:4afbbf5ef41ac18e02c8dc1f86c04b22b7a2125f2a030e25bbb4aff31abb224b"}, {file = "SQLAlchemy-1.4.49-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:24e300c0c2147484a002b175f4e1361f102e82c345bf263242f0449672a4bccf"}, + {file = "SQLAlchemy-1.4.49-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:393cd06c3b00b57f5421e2133e088df9cabcececcea180327e43b937b5a7caa5"}, {file = "SQLAlchemy-1.4.49-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:201de072b818f8ad55c80d18d1a788729cccf9be6d9dc3b9d8613b053cd4836d"}, {file = "SQLAlchemy-1.4.49-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7653ed6817c710d0c95558232aba799307d14ae084cc9b1f4c389157ec50df5c"}, {file = "SQLAlchemy-1.4.49-cp37-cp37m-win32.whl", hash = "sha256:647e0b309cb4512b1f1b78471fdaf72921b6fa6e750b9f891e09c6e2f0e5326f"}, {file = "SQLAlchemy-1.4.49-cp37-cp37m-win_amd64.whl", hash = "sha256:ab73ed1a05ff539afc4a7f8cf371764cdf79768ecb7d2ec691e3ff89abbc541e"}, {file = "SQLAlchemy-1.4.49-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:37ce517c011560d68f1ffb28af65d7e06f873f191eb3a73af5671e9c3fada08a"}, {file = "SQLAlchemy-1.4.49-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1878ce508edea4a879015ab5215546c444233881301e97ca16fe251e89f1c55"}, + {file = "SQLAlchemy-1.4.49-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95ab792ca493891d7a45a077e35b418f68435efb3e1706cb8155e20e86a9013c"}, {file = "SQLAlchemy-1.4.49-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0e8e608983e6f85d0852ca61f97e521b62e67969e6e640fe6c6b575d4db68557"}, {file = "SQLAlchemy-1.4.49-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ccf956da45290df6e809ea12c54c02ace7f8ff4d765d6d3dfb3655ee876ce58d"}, {file = "SQLAlchemy-1.4.49-cp38-cp38-win32.whl", hash = "sha256:f167c8175ab908ce48bd6550679cc6ea20ae169379e73c7720a28f89e53aa532"}, {file = "SQLAlchemy-1.4.49-cp38-cp38-win_amd64.whl", hash = "sha256:45806315aae81a0c202752558f0df52b42d11dd7ba0097bf71e253b4215f34f4"}, {file = 
"SQLAlchemy-1.4.49-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:b6d0c4b15d65087738a6e22e0ff461b407533ff65a73b818089efc8eb2b3e1de"}, {file = "SQLAlchemy-1.4.49-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a843e34abfd4c797018fd8d00ffffa99fd5184c421f190b6ca99def4087689bd"}, + {file = "SQLAlchemy-1.4.49-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:738d7321212941ab19ba2acf02a68b8ee64987b248ffa2101630e8fccb549e0d"}, {file = "SQLAlchemy-1.4.49-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1c890421651b45a681181301b3497e4d57c0d01dc001e10438a40e9a9c25ee77"}, {file = "SQLAlchemy-1.4.49-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d26f280b8f0a8f497bc10573849ad6dc62e671d2468826e5c748d04ed9e670d5"}, {file = "SQLAlchemy-1.4.49-cp39-cp39-win32.whl", hash = "sha256:ec2268de67f73b43320383947e74700e95c6770d0c68c4e615e9897e46296294"}, {file = "SQLAlchemy-1.4.49-cp39-cp39-win_amd64.whl", hash = "sha256:bbdf16372859b8ed3f4d05f925a984771cd2abd18bd187042f24be4886c2a15f"}, {file = "SQLAlchemy-1.4.49.tar.gz", hash = "sha256:06ff25cbae30c396c4b7737464f2a7fc37a67b7da409993b182b024cec80aed9"}, ] -sqlalchemy-jsonfield = [ + +[package.dependencies] +greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} + +[package.extras] +aiomysql = ["aiomysql", "greenlet (!=0.4.17)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing-extensions (!=3.10.0.1)"] +asyncio = ["greenlet (!=0.4.17)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4)", "greenlet (!=0.4.17)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)"] +mssql = ["pyodbc"] +mssql-pymssql = ["pymssql"] +mssql-pyodbc = ["pyodbc"] +mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"] +mysql = ["mysqlclient (>=1.4.0)", "mysqlclient (>=1.4.0,<2)"] +mysql-connector = ["mysql-connector-python"] +oracle = ["cx-oracle (>=7)", "cx-oracle (>=7,<8)"] +postgresql = ["psycopg2 (>=2.7)"] +postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] +postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"] +postgresql-psycopg2binary = ["psycopg2-binary"] +postgresql-psycopg2cffi = ["psycopg2cffi"] +pymysql = ["pymysql", "pymysql (<1)"] +sqlcipher = ["sqlcipher3-binary"] + +[[package]] +name = "sqlalchemy-jsonfield" +version = "1.0.1.post0" +description = "SQLALchemy JSONField implementation for storing dicts at SQL" +optional = false +python-versions = ">=3.7.0" +files = [ {file = "SQLAlchemy-JSONField-1.0.1.post0.tar.gz", hash = "sha256:72a5e714fe0493d2660abd7484a9fd9f492f493a0856288dd22a5decb29f5dc4"}, {file = "SQLAlchemy_JSONField-1.0.1.post0-py3-none-any.whl", hash = "sha256:d6f1e5ee329a3c0d9d164e40d81a2143ac8332e09988fbbaff84179dac5503d4"}, ] -sqlalchemy-utils = [ + +[package.dependencies] +sqlalchemy = "*" + +[[package]] +name = "sqlalchemy-utils" +version = "0.41.1" +description = "Various utility functions for SQLAlchemy." 
+optional = false +python-versions = ">=3.6" +files = [ {file = "SQLAlchemy-Utils-0.41.1.tar.gz", hash = "sha256:a2181bff01eeb84479e38571d2c0718eb52042f9afd8c194d0d02877e84b7d74"}, {file = "SQLAlchemy_Utils-0.41.1-py3-none-any.whl", hash = "sha256:6c96b0768ea3f15c0dc56b363d386138c562752b84f647fb8d31a2223aaab801"}, ] -sqlfluff = [ + +[package.dependencies] +SQLAlchemy = ">=1.3" + +[package.extras] +arrow = ["arrow (>=0.3.4)"] +babel = ["Babel (>=1.3)"] +color = ["colour (>=0.0.4)"] +encrypted = ["cryptography (>=0.6)"] +intervals = ["intervals (>=0.7.1)"] +password = ["passlib (>=1.6,<2.0)"] +pendulum = ["pendulum (>=2.0.5)"] +phone = ["phonenumbers (>=5.9.2)"] +test = ["Jinja2 (>=2.3)", "Pygments (>=1.2)", "backports.zoneinfo", "docutils (>=0.10)", "flake8 (>=2.4.0)", "flexmock (>=0.9.7)", "isort (>=4.2.2)", "pg8000 (>=1.12.4)", "psycopg (>=3.1.8)", "psycopg2 (>=2.5.1)", "psycopg2cffi (>=2.8.1)", "pymysql", "pyodbc", "pytest (>=2.7.1)", "python-dateutil (>=2.6)", "pytz (>=2014.2)"] +test-all = ["Babel (>=1.3)", "Jinja2 (>=2.3)", "Pygments (>=1.2)", "arrow (>=0.3.4)", "backports.zoneinfo", "colour (>=0.0.4)", "cryptography (>=0.6)", "docutils (>=0.10)", "flake8 (>=2.4.0)", "flexmock (>=0.9.7)", "furl (>=0.4.1)", "intervals (>=0.7.1)", "isort (>=4.2.2)", "passlib (>=1.6,<2.0)", "pendulum (>=2.0.5)", "pg8000 (>=1.12.4)", "phonenumbers (>=5.9.2)", "psycopg (>=3.1.8)", "psycopg2 (>=2.5.1)", "psycopg2cffi (>=2.8.1)", "pymysql", "pyodbc", "pytest (>=2.7.1)", "python-dateutil", "python-dateutil (>=2.6)", "pytz (>=2014.2)"] +timezone = ["python-dateutil"] +url = ["furl (>=0.4.1)"] + +[[package]] +name = "sqlfluff" +version = "2.3.2" +description = "The SQL Linter for Humans" +optional = false +python-versions = ">=3.7" +files = [ {file = "sqlfluff-2.3.2-py3-none-any.whl", hash = "sha256:85c8b683e283ff632fe28529ddb60585ea2d1d3c614fc7a1db171632b99dcce3"}, {file = "sqlfluff-2.3.2.tar.gz", hash = "sha256:3403ce7e9133766d7336b7e26638657ec6cc9e5610e35186b7f02cc427dd49b7"}, ] -sqlparams = [ + +[package.dependencies] +appdirs = "*" +chardet = "*" +click = "*" +colorama = ">=0.3" +diff-cover = ">=2.5.0" +importlib-resources = {version = "*", markers = "python_version < \"3.9\""} +Jinja2 = "*" +pathspec = "*" +pytest = "*" +pyyaml = ">=5.1" +regex = "*" +tblib = "*" +toml = {version = "*", markers = "python_version < \"3.11\""} +tqdm = "*" +typing-extensions = "*" + +[[package]] +name = "sqlparams" +version = "6.0.1" +description = "Convert between various DB API 2.0 parameter styles." +optional = true +python-versions = ">=3.8" +files = [ {file = "sqlparams-6.0.1-py3-none-any.whl", hash = "sha256:566651376315c832876be4a0f58ffa23a23fab257d77ee492bdf8d301e169d0d"}, {file = "sqlparams-6.0.1.tar.gz", hash = "sha256:032b2f949d4afbcbfa24003f6fb407f2fc8468184e3d8ca3d59ba6b30d4935bf"}, ] -sqlparse = [ + +[[package]] +name = "sqlparse" +version = "0.4.4" +description = "A non-validating SQL parser." 
+optional = false +python-versions = ">=3.5" +files = [ {file = "sqlparse-0.4.4-py3-none-any.whl", hash = "sha256:5430a4fe2ac7d0f93e66f1efc6e1338a41884b7ddf2a350cedd20ccc4d9d28f3"}, {file = "sqlparse-0.4.4.tar.gz", hash = "sha256:d446183e84b8349fa3061f0fe7f06ca94ba65b426946ffebe6e3e8295332420c"}, ] -stevedore = [ + +[package.extras] +dev = ["build", "flake8"] +doc = ["sphinx"] +test = ["pytest", "pytest-cov"] + +[[package]] +name = "stevedore" +version = "5.1.0" +description = "Manage dynamic plugins for Python applications" +optional = false +python-versions = ">=3.8" +files = [ {file = "stevedore-5.1.0-py3-none-any.whl", hash = "sha256:8cc040628f3cea5d7128f2e76cf486b2251a4e543c7b938f58d9a377f6694a2d"}, {file = "stevedore-5.1.0.tar.gz", hash = "sha256:a54534acf9b89bc7ed264807013b505bf07f74dbe4bcfa37d32bd063870b087c"}, ] -sympy = [ + +[package.dependencies] +pbr = ">=2.0.0,<2.1.0 || >2.1.0" + +[[package]] +name = "sympy" +version = "1.12" +description = "Computer algebra system (CAS) in Python" +optional = true +python-versions = ">=3.8" +files = [ {file = "sympy-1.12-py3-none-any.whl", hash = "sha256:c3588cd4295d0c0f603d0f2ae780587e64e2efeedb3521e46b9bb1d08d184fa5"}, {file = "sympy-1.12.tar.gz", hash = "sha256:ebf595c8dac3e0fdc4152c51878b498396ec7f30e7a914d6071e674d49420fb8"}, ] -tabulate = [ + +[package.dependencies] +mpmath = ">=0.19" + +[[package]] +name = "tabulate" +version = "0.9.0" +description = "Pretty-print tabular data" +optional = false +python-versions = ">=3.7" +files = [ {file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"}, {file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"}, ] -tblib = [ + +[package.extras] +widechars = ["wcwidth"] + +[[package]] +name = "tblib" +version = "2.0.0" +description = "Traceback serialization library." 
+optional = false +python-versions = ">=3.7" +files = [ {file = "tblib-2.0.0-py3-none-any.whl", hash = "sha256:9100bfa016b047d5b980d66e7efed952fbd20bd85b56110aaf473cb97d18709a"}, {file = "tblib-2.0.0.tar.gz", hash = "sha256:a6df30f272c08bf8be66e0775fad862005d950a6b8449b94f7c788731d70ecd7"}, ] -tenacity = [ + +[[package]] +name = "tenacity" +version = "8.2.3" +description = "Retry code until it succeeds" +optional = false +python-versions = ">=3.7" +files = [ {file = "tenacity-8.2.3-py3-none-any.whl", hash = "sha256:ce510e327a630c9e1beaf17d42e6ffacc88185044ad85cf74c0a8887c6a0f88c"}, {file = "tenacity-8.2.3.tar.gz", hash = "sha256:5398ef0d78e63f40007c1fb4c0bff96e1911394d2fa8d194f77619c05ff6cc8a"}, ] -termcolor = [ + +[package.extras] +doc = ["reno", "sphinx", "tornado (>=4.5)"] + +[[package]] +name = "termcolor" +version = "2.3.0" +description = "ANSI color formatting for output in terminal" +optional = false +python-versions = ">=3.7" +files = [ {file = "termcolor-2.3.0-py3-none-any.whl", hash = "sha256:3afb05607b89aed0ffe25202399ee0867ad4d3cb4180d98aaf8eefa6a5f7d475"}, {file = "termcolor-2.3.0.tar.gz", hash = "sha256:b5b08f68937f138fe92f6c089b99f1e2da0ae56c52b78bf7075fd95420fd9a5a"}, ] -text-unidecode = [ + +[package.extras] +tests = ["pytest", "pytest-cov"] + +[[package]] +name = "text-unidecode" +version = "1.3" +description = "The most basic Text::Unidecode port" +optional = false +python-versions = "*" +files = [ {file = "text-unidecode-1.3.tar.gz", hash = "sha256:bad6603bb14d279193107714b288be206cac565dfa49aa5b105294dd5c4aab93"}, {file = "text_unidecode-1.3-py2.py3-none-any.whl", hash = "sha256:1311f10e8b895935241623731c2ba64f4c455287888b18189350b67134a822e8"}, ] -thrift = [ + +[[package]] +name = "thrift" +version = "0.16.0" +description = "Python bindings for the Apache Thrift RPC system" +optional = true +python-versions = "*" +files = [ {file = "thrift-0.16.0.tar.gz", hash = "sha256:2b5b6488fcded21f9d312aa23c9ff6a0195d0f6ae26ddbd5ad9e3e25dfc14408"}, ] -tokenizers = [ + +[package.dependencies] +six = ">=1.7.2" + +[package.extras] +all = ["tornado (>=4.0)", "twisted"] +tornado = ["tornado (>=4.0)"] +twisted = ["twisted"] + +[[package]] +name = "tokenizers" +version = "0.13.3" +description = "Fast and Customizable Tokenizers" +optional = true +python-versions = "*" +files = [ {file = "tokenizers-0.13.3-cp310-cp310-macosx_10_11_x86_64.whl", hash = "sha256:f3835c5be51de8c0a092058a4d4380cb9244fb34681fd0a295fbf0a52a5fdf33"}, {file = "tokenizers-0.13.3-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:4ef4c3e821730f2692489e926b184321e887f34fb8a6b80b8096b966ba663d07"}, {file = "tokenizers-0.13.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5fd1a6a25353e9aa762e2aae5a1e63883cad9f4e997c447ec39d071020459bc"}, @@ -9126,99 +8353,287 @@ tokenizers = [ {file = "tokenizers-0.13.3-cp39-cp39-win_amd64.whl", hash = "sha256:bc0a6f1ba036e482db6453571c9e3e60ecd5489980ffd95d11dc9f960483d783"}, {file = "tokenizers-0.13.3.tar.gz", hash = "sha256:2e546dbb68b623008a5442353137fbb0123d311a6d7ba52f2667c8862a75af2e"}, ] -toml = [ + +[package.extras] +dev = ["black (==22.3)", "datasets", "numpy", "pytest", "requests"] +docs = ["setuptools-rust", "sphinx", "sphinx-rtd-theme"] +testing = ["black (==22.3)", "datasets", "numpy", "pytest", "requests"] + +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ {file = 
"toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, ] -tomli = [ + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] -tomli-w = [ + +[[package]] +name = "tomli-w" +version = "1.0.0" +description = "A lil' TOML writer" +optional = false +python-versions = ">=3.7" +files = [ {file = "tomli_w-1.0.0-py3-none-any.whl", hash = "sha256:9f2a07e8be30a0729e533ec968016807069991ae2fd921a78d42f429ae5f4463"}, {file = "tomli_w-1.0.0.tar.gz", hash = "sha256:f463434305e0336248cac9c2dc8076b707d8a12d019dd349f5c1e382dd1ae1b9"}, ] -tomlkit = [ + +[[package]] +name = "tomlkit" +version = "0.12.1" +description = "Style preserving TOML library" +optional = false +python-versions = ">=3.7" +files = [ {file = "tomlkit-0.12.1-py3-none-any.whl", hash = "sha256:712cbd236609acc6a3e2e97253dfc52d4c2082982a88f61b640ecf0817eab899"}, {file = "tomlkit-0.12.1.tar.gz", hash = "sha256:38e1ff8edb991273ec9f6181244a6a391ac30e9f5098e7535640ea6be97a7c86"}, ] -tqdm = [ + +[[package]] +name = "tqdm" +version = "4.66.1" +description = "Fast, Extensible Progress Meter" +optional = false +python-versions = ">=3.7" +files = [ {file = "tqdm-4.66.1-py3-none-any.whl", hash = "sha256:d302b3c5b53d47bce91fea46679d9c3c6508cf6332229aa1e7d8653723793386"}, {file = "tqdm-4.66.1.tar.gz", hash = "sha256:d88e651f9db8d8551a62556d3cff9e3034274ca5d66e93197cf2490e2dcb69c7"}, ] -typeapi = [ + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] +notebook = ["ipywidgets (>=6)"] +slack = ["slack-sdk"] +telegram = ["requests"] + +[[package]] +name = "typeapi" +version = "2.1.1" +description = "" +optional = false +python-versions = ">=3.6.3,<4.0.0" +files = [ {file = "typeapi-2.1.1-py3-none-any.whl", hash = "sha256:ef41577f316bfd362572e727ba349dab80a7362318a80fc72e6a807017d04c5c"}, {file = "typeapi-2.1.1.tar.gz", hash = "sha256:49b3c1d3382e27dccbb59132a3a823c61954f679a0c61f119fd6d8470073a298"}, ] -types-awscrt = [ + +[package.dependencies] +typing-extensions = ">=3.0.0" + +[[package]] +name = "types-awscrt" +version = "0.19.1" +description = "Type annotations and code completion for awscrt" +optional = false +python-versions = ">=3.7,<4.0" +files = [ {file = "types_awscrt-0.19.1-py3-none-any.whl", hash = "sha256:68fffeb75396e9e7614cd930b2d52295f680230774750907bcafb56f11514043"}, {file = "types_awscrt-0.19.1.tar.gz", hash = "sha256:61833aa140e724a9098025610f4b8cde3dcf65b842631d7447378f9f5db4e1fd"}, ] -types-cachetools = [ + +[[package]] +name = "types-cachetools" +version = "5.3.0.6" +description = "Typing stubs for cachetools" +optional = false +python-versions = "*" +files = [ {file = "types-cachetools-5.3.0.6.tar.gz", hash = "sha256:595f0342d246c8ba534f5a762cf4c2f60ecb61e8002b8b2277fd5cf791d4e851"}, {file = "types_cachetools-5.3.0.6-py3-none-any.whl", hash = "sha256:f7f8a25bfe306f2e6bc2ad0a2f949d9e72f2d91036d509c36d3810bf728bc6e1"}, ] -types-click = [ + +[[package]] +name = "types-click" +version = "7.1.8" +description = 
"Typing stubs for click" +optional = false +python-versions = "*" +files = [ {file = "types-click-7.1.8.tar.gz", hash = "sha256:b6604968be6401dc516311ca50708a0a28baa7a0cb840efd7412f0dbbff4e092"}, {file = "types_click-7.1.8-py3-none-any.whl", hash = "sha256:8cb030a669e2e927461be9827375f83c16b8178c365852c060a34e24871e7e81"}, ] -types-deprecated = [ + +[[package]] +name = "types-deprecated" +version = "1.2.9.3" +description = "Typing stubs for Deprecated" +optional = false +python-versions = "*" +files = [ {file = "types-Deprecated-1.2.9.3.tar.gz", hash = "sha256:ef87327adf3e3c4a4c7d8e06e58f6476710d3466ecfb53c49efb080804a70ef3"}, {file = "types_Deprecated-1.2.9.3-py3-none-any.whl", hash = "sha256:24da9210763e5e1b3d0d4f6f8bba9ad3bb6af3fe7f6815fc37e3ede4681704f5"}, ] -types-protobuf = [ + +[[package]] +name = "types-protobuf" +version = "4.24.0.1" +description = "Typing stubs for protobuf" +optional = false +python-versions = "*" +files = [ {file = "types-protobuf-4.24.0.1.tar.gz", hash = "sha256:90adea3b693d6a40d8ef075c58fe6b5cc6e01fe1496301a7e6fc70398dcff92e"}, {file = "types_protobuf-4.24.0.1-py3-none-any.whl", hash = "sha256:df203a204e4ae97d4cca4c9cf725262579dd7857a19f9e7fc74871ccfa073c01"}, ] -types-psutil = [ + +[[package]] +name = "types-psutil" +version = "5.9.5.16" +description = "Typing stubs for psutil" +optional = false +python-versions = "*" +files = [ {file = "types-psutil-5.9.5.16.tar.gz", hash = "sha256:4e9b219efb625d3d04f6bf106934f87cab49aa41a94b0a3b3089403f47a79228"}, {file = "types_psutil-5.9.5.16-py3-none-any.whl", hash = "sha256:fec713104d5d143afea7b976cfa691ca1840f5d19e8714a5d02a96ebd061363e"}, ] -types-psycopg2 = [ + +[[package]] +name = "types-psycopg2" +version = "2.9.21.14" +description = "Typing stubs for psycopg2" +optional = false +python-versions = "*" +files = [ {file = "types-psycopg2-2.9.21.14.tar.gz", hash = "sha256:bf73a0ac4da4e278c89bf1b01fc596d5a5ac7a356cfe6ac0249f47b9e259f868"}, {file = "types_psycopg2-2.9.21.14-py3-none-any.whl", hash = "sha256:cd9c5350631f3bc6184ec8d48f2ed31d4ea660f89d0fffe78239450782f383c5"}, ] -types-python-dateutil = [ + +[[package]] +name = "types-python-dateutil" +version = "2.8.19.14" +description = "Typing stubs for python-dateutil" +optional = false +python-versions = "*" +files = [ {file = "types-python-dateutil-2.8.19.14.tar.gz", hash = "sha256:1f4f10ac98bb8b16ade9dbee3518d9ace017821d94b057a425b069f834737f4b"}, {file = "types_python_dateutil-2.8.19.14-py3-none-any.whl", hash = "sha256:f977b8de27787639986b4e28963263fd0e5158942b3ecef91b9335c130cb1ce9"}, ] -types-pyyaml = [ + +[[package]] +name = "types-pyyaml" +version = "6.0.12.11" +description = "Typing stubs for PyYAML" +optional = false +python-versions = "*" +files = [ {file = "types-PyYAML-6.0.12.11.tar.gz", hash = "sha256:7d340b19ca28cddfdba438ee638cd4084bde213e501a3978738543e27094775b"}, {file = "types_PyYAML-6.0.12.11-py3-none-any.whl", hash = "sha256:a461508f3096d1d5810ec5ab95d7eeecb651f3a15b71959999988942063bf01d"}, ] -types-requests = [ + +[[package]] +name = "types-requests" +version = "2.31.0.2" +description = "Typing stubs for requests" +optional = false +python-versions = "*" +files = [ {file = "types-requests-2.31.0.2.tar.gz", hash = "sha256:6aa3f7faf0ea52d728bb18c0a0d1522d9bfd8c72d26ff6f61bfc3d06a411cf40"}, {file = "types_requests-2.31.0.2-py3-none-any.whl", hash = "sha256:56d181c85b5925cbc59f4489a57e72a8b2166f18273fd8ba7b6fe0c0b986f12a"}, ] -types-s3transfer = [ + +[package.dependencies] +types-urllib3 = "*" + +[[package]] +name = "types-s3transfer" +version = 
"0.6.2" +description = "Type annotations and code completion for s3transfer" +optional = false +python-versions = ">=3.7,<4.0" +files = [ {file = "types_s3transfer-0.6.2-py3-none-any.whl", hash = "sha256:1068877b6e59be5226fa3006ae64371ac9d5bc590dfdbd9c66fd0a075d3254ac"}, {file = "types_s3transfer-0.6.2.tar.gz", hash = "sha256:4ba9b483796fdcd026aa162ee03bdcedd2bf7d08e9387c820dcdd158b0102057"}, ] -types-setuptools = [ + +[[package]] +name = "types-setuptools" +version = "68.1.0.1" +description = "Typing stubs for setuptools" +optional = false +python-versions = "*" +files = [ {file = "types-setuptools-68.1.0.1.tar.gz", hash = "sha256:271ed8da44885cd9a701c86e48cc6d3cc988052260e72b3ce26c26b3028f86ed"}, {file = "types_setuptools-68.1.0.1-py3-none-any.whl", hash = "sha256:a9a0d2ca1da8a15924890d464adcee4004deb07b6a99bd0b1881eac5c73cb3a7"}, ] -types-simplejson = [ + +[[package]] +name = "types-simplejson" +version = "3.19.0.2" +description = "Typing stubs for simplejson" +optional = false +python-versions = "*" +files = [ {file = "types-simplejson-3.19.0.2.tar.gz", hash = "sha256:ebc81f886f89d99d6b80c726518aa2228bc77c26438f18fd81455e4f79f8ee1b"}, {file = "types_simplejson-3.19.0.2-py3-none-any.whl", hash = "sha256:8ba093dc7884f59b3e62aed217144085e675a269debc32678fd80e0b43b2b86f"}, ] -types-sqlalchemy = [ + +[[package]] +name = "types-sqlalchemy" +version = "1.4.53.38" +description = "Typing stubs for SQLAlchemy" +optional = false +python-versions = "*" +files = [ {file = "types-SQLAlchemy-1.4.53.38.tar.gz", hash = "sha256:5bb7463537e04e1aa5a3557eb725930df99226dcfd3c9bf93008025bfe5c169e"}, {file = "types_SQLAlchemy-1.4.53.38-py3-none-any.whl", hash = "sha256:7e60e74f823931cc9a9e8adb0a4c05e5533e6708b8a266807893a739faf4eaaa"}, ] -types-tqdm = [ + +[[package]] +name = "types-tqdm" +version = "4.66.0.2" +description = "Typing stubs for tqdm" +optional = false +python-versions = "*" +files = [ {file = "types-tqdm-4.66.0.2.tar.gz", hash = "sha256:9553a5e44c1d485fce19f505b8bd65c0c3e87e870678d1f2ed764ae59a55d45f"}, {file = "types_tqdm-4.66.0.2-py3-none-any.whl", hash = "sha256:13dddd38908834abdf0acdc2b70cab7ac4bcc5ad7356ced450471662e58a0ffc"}, ] -types-urllib3 = [ + +[[package]] +name = "types-urllib3" +version = "1.26.25.14" +description = "Typing stubs for urllib3" +optional = false +python-versions = "*" +files = [ {file = "types-urllib3-1.26.25.14.tar.gz", hash = "sha256:229b7f577c951b8c1b92c1bc2b2fdb0b49847bd2af6d1cc2a2e3dd340f3bda8f"}, {file = "types_urllib3-1.26.25.14-py3-none-any.whl", hash = "sha256:9683bbb7fb72e32bfe9d2be6e04875fbe1b3eeec3cbb4ea231435aa7fd6b4f0e"}, ] -typing-extensions = [ + +[[package]] +name = "typing-extensions" +version = "4.7.1" +description = "Backported and Experimental Type Hints for Python 3.7+" +optional = false +python-versions = ">=3.7" +files = [ {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"}, {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, ] -tzdata = [ + +[[package]] +name = "tzdata" +version = "2023.3" +description = "Provider of IANA time zone data" +optional = false +python-versions = ">=2" +files = [ {file = "tzdata-2023.3-py2.py3-none-any.whl", hash = "sha256:7e65763eef3120314099b6939b5546db7adce1e7d6f2e179e3df563c70511eda"}, {file = "tzdata-2023.3.tar.gz", hash = "sha256:11ef1e08e54acb0d4f95bdb1be05da659673de4acbd21bf9c69e94cc5e907a3a"}, ] @@ -9248,30 +8663,86 @@ description = "Micro subset of 
unicode data files for linkify-it-py projects." optional = false python-versions = ">=3.7" files = [ -uc-micro-py = [ {file = "uc-micro-py-1.0.2.tar.gz", hash = "sha256:30ae2ac9c49f39ac6dce743bd187fcd2b574b16ca095fa74cd9396795c954c54"}, {file = "uc_micro_py-1.0.2-py3-none-any.whl", hash = "sha256:8c9110c309db9d9e87302e2f4ad2c3152770930d88ab385cd544e7a7e75f3de0"}, ] -unicodecsv = [ + +[package.extras] +test = ["coverage", "pytest", "pytest-cov"] + +[[package]] +name = "unicodecsv" +version = "0.14.1" +description = "Python2's stdlib csv module is nice, but it doesn't support unicode. This module is a drop-in replacement which *does*." +optional = false +python-versions = "*" +files = [ {file = "unicodecsv-0.14.1.tar.gz", hash = "sha256:018c08037d48649a0412063ff4eda26eaa81eff1546dbffa51fa5293276ff7fc"}, ] -universal-pathlib = [ + +[[package]] +name = "universal-pathlib" +version = "0.2.1" +description = "pathlib api extended to use fsspec backends" +optional = false +python-versions = ">=3.8" +files = [ {file = "universal_pathlib-0.2.1-py3-none-any.whl", hash = "sha256:bb14881f1c6c025c654a658c253b4cf89e8238dff6d3c847aa5723899227f85e"}, {file = "universal_pathlib-0.2.1.tar.gz", hash = "sha256:fda2f484d875c26079771f94acfef58647eed80efce75f0bf8824373b432e802"}, ] -uritemplate = [ + +[package.dependencies] +fsspec = ">=2022.1.0" + +[package.extras] +dev = ["adlfs", "aiohttp", "cheroot", "gcsfs", "moto[s3,server] (<5)", "mypy (==1.8.0)", "packaging", "pydantic", "pydantic-settings", "pylint (==2.17.4)", "pytest (==8.0.0)", "pytest-cov (==4.1.0)", "pytest-mock (==3.12.0)", "pytest-sugar (==0.9.7)", "requests", "s3fs", "webdav4[fsspec]", "wsgidav"] +tests = ["mypy (==1.8.0)", "packaging", "pylint (==2.17.4)", "pytest (==8.0.0)", "pytest-cov (==4.1.0)", "pytest-mock (==3.12.0)", "pytest-sugar (==0.9.7)"] + +[[package]] +name = "uritemplate" +version = "4.1.1" +description = "Implementation of RFC 6570 URI Templates" +optional = false +python-versions = ">=3.6" +files = [ {file = "uritemplate-4.1.1-py2.py3-none-any.whl", hash = "sha256:830c08b8d99bdd312ea4ead05994a38e8936266f84b9a7878232db50b044e02e"}, {file = "uritemplate-4.1.1.tar.gz", hash = "sha256:4346edfc5c3b79f694bccd6d6099a322bbeb628dbf2cd86eea55a456ce5124f0"}, ] -urllib3 = [ + +[[package]] +name = "urllib3" +version = "1.26.16" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ {file = "urllib3-1.26.16-py2.py3-none-any.whl", hash = "sha256:8d36afa7616d8ab714608411b4a3b13e58f463aee519024578e062e141dce20f"}, {file = "urllib3-1.26.16.tar.gz", hash = "sha256:8f135f6502756bde6b2a9b28989df5fbe87c9970cecaa69041edcce7f0589b14"}, ] -validators = [ + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] + +[[package]] +name = "validators" +version = "0.21.0" +description = "Python Data Validation for Humans™" +optional = true +python-versions = ">=3.8,<4.0" +files = [ {file = "validators-0.21.0-py3-none-any.whl", hash = "sha256:3470db6f2384c49727ee319afa2e97aec3f8fad736faa6067e0fd7f9eaf2c551"}, {file = "validators-0.21.0.tar.gz", hash = "sha256:245b98ab778ed9352a7269c6a8f6c2a839bed5b2a7e3e60273ce399d247dd4b3"}, ] -watchdog = [ + +[[package]] +name = "watchdog" +version = "3.0.0" +description = "Filesystem events monitoring" +optional = false +python-versions = ">=3.7" +files = [ {file = "watchdog-3.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:336adfc6f5cc4e037d52db31194f7581ff744b67382eb6021c868322e32eef41"}, {file = "watchdog-3.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a70a8dcde91be523c35b2bf96196edc5730edb347e374c7de7cd20c43ed95397"}, {file = "watchdog-3.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:adfdeab2da79ea2f76f87eb42a3ab1966a5313e5a69a0213a3cc06ef692b0e96"}, @@ -9300,23 +8771,79 @@ watchdog = [ {file = "watchdog-3.0.0-py3-none-win_ia64.whl", hash = "sha256:5d9f3a10e02d7371cd929b5d8f11e87d4bad890212ed3901f9b4d68767bee759"}, {file = "watchdog-3.0.0.tar.gz", hash = "sha256:4d98a320595da7a7c5a18fc48cb633c2e73cda78f93cac2ef42d42bf609a33f9"}, ] -wcwidth = [ + +[package.extras] +watchmedo = ["PyYAML (>=3.10)"] + +[[package]] +name = "wcwidth" +version = "0.2.6" +description = "Measures the displayed width of unicode strings in a terminal" +optional = false +python-versions = "*" +files = [ {file = "wcwidth-0.2.6-py2.py3-none-any.whl", hash = "sha256:795b138f6875577cd91bba52baf9e445cd5118fd32723b460e30a0af30ea230e"}, {file = "wcwidth-0.2.6.tar.gz", hash = "sha256:a5220780a404dbe3353789870978e472cfe477761f06ee55077256e509b156d0"}, ] -weaviate-client = [ + +[[package]] +name = "weaviate-client" +version = "3.23.2" +description = "A python native Weaviate client" +optional = true +python-versions = ">=3.8" +files = [ {file = "weaviate-client-3.23.2.tar.gz", hash = "sha256:1c8c94df032dd2fa5a4ea615fc69ccb983ffad5cc02974f78c793839e61ac150"}, {file = "weaviate_client-3.23.2-py3-none-any.whl", hash = "sha256:88ffc38cca07806d64726cc74bc194c7da50b222aa4e2cd129f4c1f5e53e9b61"}, ] -werkzeug = [ + +[package.dependencies] +authlib = ">=1.1.0" +requests = ">=2.28.0,<=2.31.0" +tqdm = ">=4.59.0,<5.0.0" +validators = ">=0.18.2,<=0.21.0" + +[package.extras] +grpc = ["grpcio", "grpcio-tools"] + +[[package]] +name = "werkzeug" +version = "2.3.7" +description = "The comprehensive WSGI web application library." 
+optional = false +python-versions = ">=3.8" +files = [ {file = "werkzeug-2.3.7-py3-none-any.whl", hash = "sha256:effc12dba7f3bd72e605ce49807bbe692bd729c3bb122a3b91747a6ae77df528"}, {file = "werkzeug-2.3.7.tar.gz", hash = "sha256:2b8c0e447b4b9dbcc85dd97b6eeb4dcbaf6c8b6c3be0bd654e25553e0a2157d8"}, ] -wheel = [ + +[package.dependencies] +MarkupSafe = ">=2.1.1" + +[package.extras] +watchdog = ["watchdog (>=2.3)"] + +[[package]] +name = "wheel" +version = "0.41.2" +description = "A built-package format for Python" +optional = false +python-versions = ">=3.7" +files = [ {file = "wheel-0.41.2-py3-none-any.whl", hash = "sha256:75909db2664838d015e3d9139004ee16711748a52c8f336b52882266540215d8"}, {file = "wheel-0.41.2.tar.gz", hash = "sha256:0c5ac5ff2afb79ac23ab82bab027a0be7b5dbcf2e54dc50efe4bf507de1f7985"}, ] -win-precise-time = [ + +[package.extras] +test = ["pytest (>=6.0.0)", "setuptools (>=65)"] + +[[package]] +name = "win-precise-time" +version = "1.4.2" +description = "" +optional = false +python-versions = ">=3.7" +files = [ {file = "win-precise-time-1.4.2.tar.gz", hash = "sha256:89274785cbc5f2997e01675206da3203835a442c60fd97798415c6b3c179c0b9"}, {file = "win_precise_time-1.4.2-cp310-cp310-win32.whl", hash = "sha256:7fa13a2247c2ef41cd5e9b930f40716eacc7fc1f079ea72853bd5613fe087a1a"}, {file = "win_precise_time-1.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:bb8e44b0fc35fde268e8a781cdcd9f47d47abcd8089465d2d1d1063976411c8e"}, @@ -9331,7 +8858,14 @@ win-precise-time = [ {file = "win_precise_time-1.4.2-cp39-cp39-win32.whl", hash = "sha256:50d11a6ff92e1be96a8d4bee99ff6dc07a0ea0e2a392b0956bb2192e334f41ba"}, {file = "win_precise_time-1.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:3f510fa92d9c39ea533c983e1d62c7bc66fdf0a3e3c3bdda48d4ebb634ff7034"}, ] -wrapt = [ + +[[package]] +name = "wrapt" +version = "1.15.0" +description = "Module for decorators, wrappers and monkey patching." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +files = [ {file = "wrapt-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:ca1cccf838cd28d5a0883b342474c630ac48cac5df0ee6eacc9c7290f76b11c1"}, {file = "wrapt-1.15.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:e826aadda3cae59295b95343db8f3d965fb31059da7de01ee8d1c40a60398b29"}, {file = "wrapt-1.15.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5fc8e02f5984a55d2c653f5fea93531e9836abbd84342c1d1e17abc4a15084c2"}, @@ -9408,15 +8942,45 @@ wrapt = [ {file = "wrapt-1.15.0-py3-none-any.whl", hash = "sha256:64b1df0f83706b4ef4cfb4fb0e4c2669100fd7ecacfb59e091fad300d4e04640"}, {file = "wrapt-1.15.0.tar.gz", hash = "sha256:d06730c6aed78cee4126234cf2d071e01b44b915e725a6cb439a879ec9754a3a"}, ] -wtforms = [ + +[[package]] +name = "wtforms" +version = "3.0.1" +description = "Form validation and rendering for Python web development." +optional = false +python-versions = ">=3.7" +files = [ {file = "WTForms-3.0.1-py3-none-any.whl", hash = "sha256:837f2f0e0ca79481b92884962b914eba4e72b7a2daaf1f939c890ed0124b834b"}, {file = "WTForms-3.0.1.tar.gz", hash = "sha256:6b351bbb12dd58af57ffef05bc78425d08d1914e0fd68ee14143b7ade023c5bc"}, ] -yapf = [ + +[package.dependencies] +MarkupSafe = "*" + +[package.extras] +email = ["email-validator"] + +[[package]] +name = "yapf" +version = "0.33.0" +description = "A formatter for Python code." 
+optional = false +python-versions = "*" +files = [ {file = "yapf-0.33.0-py2.py3-none-any.whl", hash = "sha256:4c2b59bd5ffe46f3a7da48df87596877189148226ce267c16e8b44240e51578d"}, {file = "yapf-0.33.0.tar.gz", hash = "sha256:da62bdfea3df3673553351e6246abed26d9fe6780e548a5af9e70f6d2b4f5b9a"}, ] -yarl = [ + +[package.dependencies] +tomli = ">=2.0.1" + +[[package]] +name = "yarl" +version = "1.9.2" +description = "Yet another URL library" +optional = false +python-versions = ">=3.7" +files = [ {file = "yarl-1.9.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c2ad583743d16ddbdf6bb14b5cd76bf43b0d0006e918809d5d4ddf7bde8dd82"}, {file = "yarl-1.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:82aa6264b36c50acfb2424ad5ca537a2060ab6de158a5bd2a72a032cc75b9eb8"}, {file = "yarl-1.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c0c77533b5ed4bcc38e943178ccae29b9bcf48ffd1063f5821192f23a1bd27b9"}, @@ -9492,7 +9056,18 @@ yarl = [ {file = "yarl-1.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:61016e7d582bc46a5378ffdd02cd0314fb8ba52f40f9cf4d9a5e7dbef88dee18"}, {file = "yarl-1.9.2.tar.gz", hash = "sha256:04ab9d4b9f587c06d801c2abfe9317b77cdf996c65a90d5e84ecc45010823571"}, ] -zipp = [ + +[package.dependencies] +idna = ">=2.0" +multidict = ">=4.0" + +[[package]] +name = "zipp" +version = "3.16.2" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.8" +files = [ {file = "zipp-3.16.2-py3-none-any.whl", hash = "sha256:679e51dd4403591b2d6838a48de3d283f3d188412a9782faadf845f298736ba0"}, {file = "zipp-3.16.2.tar.gz", hash = "sha256:ebc15946aa78bd63458992fc81ec3b6f7b1e92d51c35e6de1c3804e73b799147"}, ] @@ -9526,4 +9101,4 @@ weaviate = ["weaviate-client"] [metadata] lock-version = "2.0" python-versions = ">=3.8.1,<3.13" -content-hash = "ca6ca3ce92e469087463d0253d9e517c8ec43085576da4ab6199a238ec650743" +content-hash = "2615aef951d0a48f34c12d20bc6253d3b4ab0efa167877ff4d8aff3e2c15ea14" diff --git a/pyproject.toml b/pyproject.toml index e1d2dcd6d4..00ad902347 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -144,8 +144,8 @@ cryptography = "^41.0.7" google-api-python-client = ">=1.7.11" pytest-asyncio = "^0.23.5" types-sqlalchemy = "^1.4.53.38" -clickhouse-driver = "^0.2.7" ruff = "^0.3.2" +clickhouse-driver = "^0.2.7" [tool.poetry.group.pipeline] optional=true From 29b5a07f2bd828d41d516f30273a10a01c31148e Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Tue, 19 Mar 2024 16:21:59 +0200 Subject: [PATCH 024/127] Swap engine/primary key clause ordering #1055 Signed-off-by: Marcel Coetzee --- dlt/destinations/impl/clickhouse/clickhouse.py | 8 ++++---- dlt/destinations/impl/clickhouse/configuration.py | 4 +--- tests/load/clickhouse/test_clickhouse_table_builder.py | 4 +--- 3 files changed, 6 insertions(+), 10 deletions(-) diff --git a/dlt/destinations/impl/clickhouse/clickhouse.py b/dlt/destinations/impl/clickhouse/clickhouse.py index 0170ac3d3b..3b901d38a2 100644 --- a/dlt/destinations/impl/clickhouse/clickhouse.py +++ b/dlt/destinations/impl/clickhouse/clickhouse.py @@ -212,6 +212,10 @@ def _get_table_update_sql( if generate_alter: return sql + # Default to 'ReplicatedMergeTree' if user didn't explicitly set a table engine hint. + # 'ReplicatedMergeTree' is the only supported engine for Clickhouse Cloud. + sql[0] = f"{sql[0]}\nENGINE = {table.get(TABLE_ENGINE_TYPE_HINT, 'replicated_merge_tree')}" + # TODO: Remove `unique` and `primary_key` default implementations. 
if primary_key_list := [ self.capabilities.escape_identifier(c["name"]) @@ -222,10 +226,6 @@ def _get_table_update_sql( else: sql[0] += "\nPRIMARY KEY tuple()" - # Default to 'ReplicatedMergeTree' if user didn't explicitly set a table engine hint. - # 'ReplicatedMergeTree' is the only supported engine for Clickhouse Cloud. - sql[0] = f"{sql[0]}\nENGINE = {table.get(TABLE_ENGINE_TYPE_HINT, 'replicated_merge_tree')}" - return sql def _get_column_def_sql(self, c: TColumnSchema, table_format: TTableFormat = None) -> str: diff --git a/dlt/destinations/impl/clickhouse/configuration.py b/dlt/destinations/impl/clickhouse/configuration.py index e8d01ba4b0..35bf130d42 100644 --- a/dlt/destinations/impl/clickhouse/configuration.py +++ b/dlt/destinations/impl/clickhouse/configuration.py @@ -82,9 +82,7 @@ def __init__( *, credentials: ClickhouseCredentials = None, dataset_name: str = None, - create_indexes: bool = False, - stage_name: str = None, - keep_staged_files: bool = True, + create_indexes: bool = True, destination_name: str = None, environment: str = None ) -> None: diff --git a/tests/load/clickhouse/test_clickhouse_table_builder.py b/tests/load/clickhouse/test_clickhouse_table_builder.py index da6a23f668..72902c77c2 100644 --- a/tests/load/clickhouse/test_clickhouse_table_builder.py +++ b/tests/load/clickhouse/test_clickhouse_table_builder.py @@ -23,13 +23,11 @@ def clickhouse_client(empty_schema: Schema) -> ClickhouseClient: ) -pytest.mark.usefixtures("empty_schema") - - def test_create_table(clickhouse_client: ClickhouseClient) -> None: statements = clickhouse_client._get_table_update_sql("event_test_table", TABLE_UPDATE, False) assert len(statements) == 1 sql = statements[0] + print(sql) sqlfluff.parse(sql, dialect="clickhouse") assert sql.strip().startswith("CREATE TABLE") From c9b1ae4648750f49f5be31e96a6c20d47d028b6d Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Tue, 19 Mar 2024 23:58:17 +0200 Subject: [PATCH 025/127] Pass basic tests #1055 Signed-off-by: Marcel Coetzee --- dlt/common/data_writers/escape.py | 2 +- dlt/destinations/impl/clickhouse/__init__.py | 4 +- .../impl/clickhouse/clickhouse.py | 53 ++++++--- .../impl/clickhouse/configuration.py | 6 - dlt/destinations/impl/clickhouse/factory.py | 4 - .../impl/clickhouse/sql_client.py | 17 ++- .../test_clickhouse_table_builder.py | 111 ++++++++++++------ 7 files changed, 128 insertions(+), 69 deletions(-) diff --git a/dlt/common/data_writers/escape.py b/dlt/common/data_writers/escape.py index 027e7b1554..153e66722c 100644 --- a/dlt/common/data_writers/escape.py +++ b/dlt/common/data_writers/escape.py @@ -174,4 +174,4 @@ def escape_clickhouse_literal(v: Any) -> Any: def escape_clickhouse_identifier(v: str) -> str: - return "`" + v.replace("`", "``").replace("\\", "\\\\") + '"' + return "`" + v.replace("`", "``").replace("\\", "\\\\") + "`" diff --git a/dlt/destinations/impl/clickhouse/__init__.py b/dlt/destinations/impl/clickhouse/__init__.py index acbb08ac9a..9a3560223c 100644 --- a/dlt/destinations/impl/clickhouse/__init__.py +++ b/dlt/destinations/impl/clickhouse/__init__.py @@ -7,9 +7,9 @@ def capabilities() -> DestinationCapabilitiesContext: caps = DestinationCapabilitiesContext() caps.preferred_loader_file_format = "jsonl" - caps.supported_loader_file_formats = ["jsonl"] + caps.supported_loader_file_formats = ["jsonl", "parquet"] caps.preferred_staging_file_format = "jsonl" - caps.supported_staging_file_formats = ["jsonl"] + caps.supported_staging_file_formats = ["jsonl", "parquet"] caps.escape_identifier = 
escape_clickhouse_identifier caps.escape_literal = escape_clickhouse_literal diff --git a/dlt/destinations/impl/clickhouse/clickhouse.py b/dlt/destinations/impl/clickhouse/clickhouse.py index 3b901d38a2..097224cb18 100644 --- a/dlt/destinations/impl/clickhouse/clickhouse.py +++ b/dlt/destinations/impl/clickhouse/clickhouse.py @@ -1,6 +1,7 @@ +import logging import os from copy import deepcopy -from typing import ClassVar, Optional, Dict, List, Sequence +from typing import ClassVar, Optional, Dict, List, Sequence, cast from urllib.parse import urlparse from dlt.common.configuration.specs import ( @@ -41,6 +42,8 @@ HINT_TO_CLICKHOUSE_ATTR: Dict[TColumnHint, str] = { "primary_key": "PRIMARY KEY", + "unique": "", # No unique constraints available in Clickhouse. + "foreign_key": "", # No foreign key constraints support in Clickhouse. } TABLE_ENGINE_TYPE_TO_CLICKHOUSE_ATTR: Dict[TTableEngineType, str] = { @@ -51,12 +54,13 @@ class ClickhouseTypeMapper(TypeMapper): sct_to_unbound_dbt = { - "complex": "JSON", + "complex": "String", "text": "String", "double": "Float64", "bool": "Boolean", "date": "Date", - "timestamp": "DateTime", + "timestamp": "DateTime('UTC')", + "time": "Time('UTC')", "bigint": "Int64", "binary": "String", "wei": "Decimal", @@ -65,7 +69,8 @@ class ClickhouseTypeMapper(TypeMapper): sct_to_dbt = { "decimal": "Decimal(%i,%i)", "wei": "Decimal(%i,%i)", - "timestamp": "DateTime(%i)", + "timestamp": "DateTime(%i, 'UTC')", + "time": "Time(%i ,'UTC')", } dbt_to_sct = { @@ -74,6 +79,9 @@ class ClickhouseTypeMapper(TypeMapper): "Boolean": "bool", "Date": "date", "DateTime": "timestamp", + "DateTime('UTC')": "timestamp", + "Time": "timestamp", + "Time('UTC')": "timestamp", "Int64": "bigint", "JSON": "complex", "Decimal": "decimal", @@ -190,7 +198,7 @@ def __init__( ) super().__init__(schema, config, self.sql_client) self.config: ClickhouseClientConfiguration = config - self.active_hints = deepcopy(HINT_TO_CLICKHOUSE_ATTR) if self.config.create_indexes else {} + self.active_hints = deepcopy(HINT_TO_CLICKHOUSE_ATTR) self.type_mapper = ClickhouseTypeMapper(self.capabilities) def start_file_load(self, table: TTableSchema, file_path: str, load_id: str) -> LoadJob: @@ -213,10 +221,11 @@ def _get_table_update_sql( return sql # Default to 'ReplicatedMergeTree' if user didn't explicitly set a table engine hint. - # 'ReplicatedMergeTree' is the only supported engine for Clickhouse Cloud. - sql[0] = f"{sql[0]}\nENGINE = {table.get(TABLE_ENGINE_TYPE_HINT, 'replicated_merge_tree')}" + table_type = cast( + TTableEngineType, table.get(TABLE_ENGINE_TYPE_HINT, "replicated_merge_tree") + ) + sql[0] = f"{sql[0]}\nENGINE = {TABLE_ENGINE_TYPE_TO_CLICKHOUSE_ATTR.get(table_type)}" - # TODO: Remove `unique` and `primary_key` default implementations. if primary_key_list := [ self.capabilities.escape_identifier(c["name"]) for c in new_columns @@ -226,26 +235,38 @@ def _get_table_update_sql( else: sql[0] += "\nPRIMARY KEY tuple()" + # TODO: Apply sort order and cluster key hints. + return sql def _get_column_def_sql(self, c: TColumnSchema, table_format: TTableFormat = None) -> str: - # The primary key definition is defined outside column specification. + # Build column definition. + # The primary key and sort order definition is defined outside column specification. 
hints_str = " ".join( - self.active_hints.get(hint, "") + self.active_hints.get(hint) for hint in self.active_hints.keys() - if c.get(hint, False) is True and hint != "primary_key" + if c.get(hint, False) is True + and hint not in ("primary_key", "sort") + and hint in self.active_hints ) + + # Alter table statements only accept `Nullable` modifiers. + type_with_nullability_modifier = ( + f"Nullable({self.type_mapper.to_db_type(c)})" + if c.get("nullable", True) + else self.type_mapper.to_db_type(c) + ) + return ( - f"{self.capabilities.escape_identifier(c['name'])} " - f"{self.type_mapper.to_db_type(c)} " - f"{hints_str} " - f"{self._gen_not_null(c.get('nullable', True))}" + f"{self.capabilities.escape_identifier(c['name'])} {type_with_nullability_modifier} {hints_str}" + .strip() ) # Clickhouse fields are not nullable by default. @staticmethod def _gen_not_null(v: bool) -> str: - return "NULL" if v else "NOT NULL" + # We use the `Nullable` modifier instead of NULL / NOT NULL modifiers to cater for ALTER statement. + pass def _from_db_type( self, ch_t: str, precision: Optional[int], scale: Optional[int] diff --git a/dlt/destinations/impl/clickhouse/configuration.py b/dlt/destinations/impl/clickhouse/configuration.py index 35bf130d42..fd5b3276c0 100644 --- a/dlt/destinations/impl/clickhouse/configuration.py +++ b/dlt/destinations/impl/clickhouse/configuration.py @@ -63,11 +63,6 @@ class ClickhouseClientConfiguration(DestinationClientDwhWithStagingConfiguration # but they do not enforce uniqueness constraints. It permits duplicate values even for the primary key # columns within the same granule. # See: https://clickhouse.com/docs/en/optimize/sparse-primary-indexes - create_indexes: bool = True - """Whether `primary_key` column hint is applied. Note that Clickhouse has no unique constraint, - and primary keys don't guarantee uniqueness.""" - - __config_gen_annotations__: ClassVar[List[str]] = ["create_indexes"] def fingerprint(self) -> str: """Returns a fingerprint of host part of a connection string.""" @@ -82,7 +77,6 @@ def __init__( *, credentials: ClickhouseCredentials = None, dataset_name: str = None, - create_indexes: bool = True, destination_name: str = None, environment: str = None ) -> None: diff --git a/dlt/destinations/impl/clickhouse/factory.py b/dlt/destinations/impl/clickhouse/factory.py index 90065c6582..2242d30565 100644 --- a/dlt/destinations/impl/clickhouse/factory.py +++ b/dlt/destinations/impl/clickhouse/factory.py @@ -29,7 +29,6 @@ def __init__( credentials: t.Union[ClickhouseCredentials, str, t.Dict[str, t.Any], Connection] = None, destination_name: str = None, environment: str = None, - create_indexes: bool = False, **kwargs: t.Any, ) -> None: """Configure the Clickhouse destination to use in a pipeline. @@ -41,14 +40,11 @@ def __init__( credentials: Credentials to connect to the clickhouse database. Can be an instance of `ClickhouseCredentials`, or a connection string in the format `clickhouse://user:password@host:port/database`. - create_indexes: Maps directly to the `create_indexes` attribute of the - `ClickhouseClientConfiguration` object. **kwargs: Additional arguments passed to the destination config. 
""" super().__init__( credentials=credentials, destination_name=destination_name, environment=environment, - create_indexes=create_indexes, **kwargs, ) diff --git a/dlt/destinations/impl/clickhouse/sql_client.py b/dlt/destinations/impl/clickhouse/sql_client.py index 87b5651668..1705c7c34e 100644 --- a/dlt/destinations/impl/clickhouse/sql_client.py +++ b/dlt/destinations/impl/clickhouse/sql_client.py @@ -29,7 +29,7 @@ raise_database_error, raise_open_connection_error, ) -from dlt.destinations.typing import DBTransaction, DBApi, DBApiCursor +from dlt.destinations.typing import DBTransaction, DBApi TRANSACTIONS_UNSUPPORTED_WARNING_MESSAGE = ( @@ -57,6 +57,8 @@ def open_connection(self) -> clickhouse_driver.dbapi.connection.Connection: self._conn = clickhouse_driver.connect(dsn=self.credentials.to_native_representation()) # TODO: Set timezone to UTC explicitly in each query. # https://github.com/ClickHouse/ClickHouse/issues/699 + with self._conn.cursor() as curr: + curr.execute("set allow_experimental_object_type = 1;") return self._conn @raise_open_connection_error @@ -118,6 +120,19 @@ def fully_qualified_dataset_name(self, escape: bool = True) -> str: ) return f"{database_name}.{dataset_name}" + def make_qualified_table_name(self, table_name: str, escape: bool = True) -> str: + database_name = ( + self.capabilities.escape_identifier(self.database_name) + if escape + else self.database_name + ) + dataset_table_name = ( + self.capabilities.escape_identifier(f"{self.dataset_name}_{table_name}") + if escape + else f"{self.dataset_name}_{table_name}" + ) + return f"{database_name}.{dataset_table_name}" + @classmethod def _make_database_exception(cls, ex: Exception) -> Exception: # type: ignore[return] if isinstance(ex, clickhouse_driver.dbapi.errors.OperationalError): diff --git a/tests/load/clickhouse/test_clickhouse_table_builder.py b/tests/load/clickhouse/test_clickhouse_table_builder.py index 72902c77c2..bf19759e39 100644 --- a/tests/load/clickhouse/test_clickhouse_table_builder.py +++ b/tests/load/clickhouse/test_clickhouse_table_builder.py @@ -27,21 +27,40 @@ def test_create_table(clickhouse_client: ClickhouseClient) -> None: statements = clickhouse_client._get_table_update_sql("event_test_table", TABLE_UPDATE, False) assert len(statements) == 1 sql = statements[0] - print(sql) - sqlfluff.parse(sql, dialect="clickhouse") + + # sqlfluff struggles with clickhouse's backtick escape characters. 
+ # sqlfluff.parse(sql, dialect="clickhouse") assert sql.strip().startswith("CREATE TABLE") - assert "EVENT_TEST_TABLE" in sql - assert '"COL1" NUMBER(19,0) NOT NULL' in sql - assert '"COL2" FLOAT NOT NULL' in sql - assert '"COL3" BOOLEAN NOT NULL' in sql - assert '"COL4" TIMESTAMP_TZ NOT NULL' in sql - assert '"COL5" VARCHAR' in sql - assert '"COL6" NUMBER(38,9) NOT NULL' in sql - assert '"COL7" BINARY' in sql - assert '"COL8" NUMBER(38,0)' in sql - assert '"COL9" VARIANT NOT NULL' in sql - assert '"COL10" DATE NOT NULL' in sql + assert "event_test_table" in sql + assert "`col1` Int64" in sql + assert "`col2` Float64" in sql + assert "`col3` Boolean" in sql + assert "`col4` DateTime('UTC')" in sql + assert "`col5` String" in sql + assert "`col6` Decimal(38,9)" in sql + assert "`col7` String" in sql + assert "`col8` Decimal(76,0)" in sql + assert "`col9` String" in sql + assert "`col10` Date" in sql + assert "`col11` DateTime" in sql + assert "`col1_null` Nullable(Int64)" in sql + assert "`col2_null` Nullable(Float64)" in sql + assert "`col3_null` Nullable(Boolean)" in sql + assert "`col4_null` Nullable(DateTime('UTC'))" in sql + assert "`col5_null` Nullable(String)" in sql + assert "`col6_null` Nullable(Decimal(38,9))" in sql + assert "`col7_null` Nullable(String)" in sql + assert "`col8_null` Nullable(Decimal(76,0))" in sql + assert "`col9_null` Nullable(String)" in sql + assert "`col10_null` Nullable(Date)" in sql + assert "`col11_null` Nullable(DateTime)" in sql + assert "`col1_precision` Int64" in sql + assert "`col4_precision` DateTime(3, 'UTC')" in sql + assert "`col5_precision` String" in sql + assert "`col6_precision` Decimal(6,2)" in sql + assert "`col7_precision` String" in sql + assert "`col11_precision` DateTime" in sql def test_alter_table(clickhouse_client: ClickhouseClient) -> None: @@ -49,44 +68,58 @@ def test_alter_table(clickhouse_client: ClickhouseClient) -> None: assert len(statements) == 1 sql = statements[0] - # TODO: sqlfluff doesn't parse clickhouse multi ADD COLUMN clause correctly - # sqlfluff.parse(sql, dialect='clickhouse') + # sqlfluff struggles with clickhouse's backtick escape characters. + # sqlfluff.parse(sql, dialect="clickhouse") + # Alter table statements only accept `Nullable` modifiers. 
assert sql.startswith("ALTER TABLE") assert sql.count("ALTER TABLE") == 1 - assert sql.count("ADD COLUMN") == 1 - assert '"EVENT_TEST_TABLE"' in sql - assert '"COL1" NUMBER(19,0) NOT NULL' in sql - assert '"COL2" FLOAT NOT NULL' in sql - assert '"COL3" BOOLEAN NOT NULL' in sql - assert '"COL4" TIMESTAMP_TZ NOT NULL' in sql - assert '"COL5" VARCHAR' in sql - assert '"COL6" NUMBER(38,9) NOT NULL' in sql - assert '"COL7" BINARY' in sql - assert '"COL8" NUMBER(38,0)' in sql - assert '"COL9" VARIANT NOT NULL' in sql - assert '"COL10" DATE' in sql + assert "event_test_table" in sql + assert "`col1` Int64" in sql + assert "`col2` Float64" in sql + assert "`col3` Boolean" in sql + assert "`col4` DateTime('UTC')" in sql + assert "`col5` String" in sql + assert "`col6` Decimal(38,9)" in sql + assert "`col7` String" in sql + assert "`col8` Decimal(76,0)" in sql + assert "`col9` String" in sql + assert "`col10` Date" in sql + assert "`col11` DateTime" in sql + assert "`col1_null` Nullable(Int64)" in sql + assert "`col2_null` Nullable(Float64)" in sql + assert "`col3_null` Nullable(Boolean)" in sql + assert "`col4_null` Nullable(DateTime('UTC'))" in sql + assert "`col5_null` Nullable(String)" in sql + assert "`col6_null` Nullable(Decimal(38,9))" in sql + assert "`col7_null` Nullable(String)" in sql + assert "`col8_null` Nullable(Decimal(76,0))" in sql + assert "`col9_null` Nullable(String)" in sql + assert "`col10_null` Nullable(Date)" in sql + assert "`col11_null` Nullable(DateTime)" in sql + assert "`col1_precision` Int64" in sql + assert "`col4_precision` DateTime(3, 'UTC')" in sql + assert "`col5_precision` String" in sql + assert "`col6_precision` Decimal(6,2)" in sql + assert "`col7_precision` String" in sql + assert "`col11_precision` DateTime" in sql mod_table = deepcopy(TABLE_UPDATE) mod_table.pop(0) sql = clickhouse_client._get_table_update_sql("event_test_table", mod_table, True)[0] - assert '"COL1"' not in sql - assert '"COL2" FLOAT NOT NULL' in sql + assert "`col1`" not in sql + assert "`col2` Float64" in sql -def test_create_table_with_partition_and_cluster(clickhouse_client: ClickhouseClient) -> None: +@pytest.mark.usefixtures("empty_schema") +def test_create_table_with_primary_keys(clickhouse_client: ClickhouseClient) -> None: mod_update = deepcopy(TABLE_UPDATE) - # timestamp - mod_update[3]["partition"] = True - mod_update[4]["cluster"] = True - mod_update[1]["cluster"] = True + + mod_update[1]["primary_key"] = True + mod_update[4]["primary_key"] = True statements = clickhouse_client._get_table_update_sql("event_test_table", mod_update, False) assert len(statements) == 1 sql = statements[0] - # TODO: Can't parse cluster by - # sqlfluff.parse(sql, dialect="clickhouse") - - # clustering must be the last - assert sql.endswith('CLUSTER BY ("COL2","COL5")') + assert sql.endswith("PRIMARY KEY (`col2`, `col5`)") From 1e96a78d7f441cc1ed9cdcb4f0bf3fb4c2c1d6bf Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Wed, 20 Mar 2024 00:36:59 +0200 Subject: [PATCH 026/127] Add Clickhouse tests and improve its configuration handling Signed-off-by: Marcel Coetzee --- dlt/destinations/impl/clickhouse/__init__.py | 3 + .../impl/clickhouse/sql_client.py | 4 -- .../test_clickhouse_table_builder.py | 49 ++++++++++++++- tests/load/pipeline/test_clickhouse.py | 61 +++++++++++++++++++ tests/load/utils.py | 27 +++++++- 5 files changed, 137 insertions(+), 7 deletions(-) create mode 100644 tests/load/pipeline/test_clickhouse.py diff --git a/dlt/destinations/impl/clickhouse/__init__.py 
b/dlt/destinations/impl/clickhouse/__init__.py index 9a3560223c..f505378152 100644 --- a/dlt/destinations/impl/clickhouse/__init__.py +++ b/dlt/destinations/impl/clickhouse/__init__.py @@ -14,6 +14,9 @@ def capabilities() -> DestinationCapabilitiesContext: caps.escape_identifier = escape_clickhouse_identifier caps.escape_literal = escape_clickhouse_literal + caps.max_identifier_length = 65536 + caps.max_column_identifier_length = 65536 + caps.schema_supports_numeric_precision = True # Use 'Decimal128' with these defaults. # https://clickhouse.com/docs/en/sql-reference/data-types/decimal diff --git a/dlt/destinations/impl/clickhouse/sql_client.py b/dlt/destinations/impl/clickhouse/sql_client.py index 1705c7c34e..4b56725dc4 100644 --- a/dlt/destinations/impl/clickhouse/sql_client.py +++ b/dlt/destinations/impl/clickhouse/sql_client.py @@ -55,10 +55,6 @@ def __init__(self, dataset_name: str, credentials: ClickhouseCredentials) -> Non def open_connection(self) -> clickhouse_driver.dbapi.connection.Connection: self._conn = clickhouse_driver.connect(dsn=self.credentials.to_native_representation()) - # TODO: Set timezone to UTC explicitly in each query. - # https://github.com/ClickHouse/ClickHouse/issues/699 - with self._conn.cursor() as curr: - curr.execute("set allow_experimental_object_type = 1;") return self._conn @raise_open_connection_error diff --git a/tests/load/clickhouse/test_clickhouse_table_builder.py b/tests/load/clickhouse/test_clickhouse_table_builder.py index bf19759e39..92d617e7c2 100644 --- a/tests/load/clickhouse/test_clickhouse_table_builder.py +++ b/tests/load/clickhouse/test_clickhouse_table_builder.py @@ -1,9 +1,10 @@ from copy import deepcopy import pytest -import sqlfluff +from dlt.common.configuration import resolve_configuration from dlt.common.schema import Schema +from dlt.common.utils import custom_environ, digest128 from dlt.common.utils import uniq_id from dlt.destinations.impl.clickhouse.clickhouse import ClickhouseClient from dlt.destinations.impl.clickhouse.configuration import ( @@ -23,6 +24,30 @@ def clickhouse_client(empty_schema: Schema) -> ClickhouseClient: ) +def test_clickhouse_configuration() -> None: + # Check names normalized. + with custom_environ( + { + "DESTINATION__CLICKHOUSE__CREDENTIALS__USERNAME": "username", + "DESTINATION__CLICKHOUSE__CREDENTIALS__HOST": "host", + "DESTINATION__CLICKHOUSE__CREDENTIALS__DATABASE": "mydb", + "DESTINATION__CLICKHOUSE__CREDENTIALS__PASSWORD": "fuss_do_rah", + } + ): + C = resolve_configuration(ClickhouseCredentials(), sections=("destination", "clickhouse")) + assert C.database == "mydb" + assert C.password == "fuss_do_rah" + + # Check fingerprint. + assert ClickhouseClientConfiguration().fingerprint() == "" + # Based on host. + c = resolve_configuration( + ClickhouseCredentials(), + explicit_value="clickhouse://user1:pass@host1/db1", + ) + assert ClickhouseClientConfiguration(credentials=c).fingerprint() == digest128("host1") + + def test_create_table(clickhouse_client: ClickhouseClient) -> None: statements = clickhouse_client._get_table_update_sql("event_test_table", TABLE_UPDATE, False) assert len(statements) == 1 @@ -123,3 +148,25 @@ def test_create_table_with_primary_keys(clickhouse_client: ClickhouseClient) -> sql = statements[0] assert sql.endswith("PRIMARY KEY (`col2`, `col5`)") + + +@pytest.mark.skip( + "Only `primary_key` hint has been implemented so far, which isn't specified inline with the" + " column definition." 
+) +def test_create_table_with_hints(client: ClickhouseClient) -> None: + mod_update = deepcopy(TABLE_UPDATE) + + mod_update[0]["primary_key"] = True + mod_update[0]["sort"] = True + mod_update[1]["cluster"] = True + mod_update[4]["cluster"] = True + + sql = client._get_table_update_sql("event_test_table", TABLE_UPDATE, True) + + assert "`col1` bigint SORTKEY NOT NULL" in sql + assert "`col2` double precision DISTKEY NOT NULL" in sql + assert "`col5` varchar(max) DISTKEY" in sql + # no hints + assert "`col3` boolean NOT NULL" in sql + assert "`col4` timestamp with time zone NOT NULL" in sql diff --git a/tests/load/pipeline/test_clickhouse.py b/tests/load/pipeline/test_clickhouse.py new file mode 100644 index 0000000000..79fcea53da --- /dev/null +++ b/tests/load/pipeline/test_clickhouse.py @@ -0,0 +1,61 @@ +import pytest + +import dlt +from dlt.common.utils import uniq_id +from tests.load.pipeline.utils import destinations_configs, DestinationTestConfiguration +from tests.load.pipeline.utils import load_table_counts + + +@pytest.mark.parametrize( + "destination_config", + destinations_configs(default_staging_configs=True, subset=["clickhouse"]), + ids=lambda x: x.name, +) +def test_clickhouse_destinations(destination_config: DestinationTestConfiguration) -> None: + pipeline = destination_config.setup_pipeline(f"clickhouse_{uniq_id()}", full_refresh=True) + + @dlt.resource(name="items", write_disposition="append") + def items(): + yield { + "id": 1, + "name": "item", + "sub_items": [{"id": 101, "name": "sub item 101"}, {"id": 101, "name": "sub item 102"}], + } + + pipeline.run(items, loader_file_format=destination_config.file_format) + + table_counts = load_table_counts( + pipeline, *[t["name"] for t in pipeline.default_schema._schema_tables.values()] + ) + assert table_counts["items"] == 1 + assert table_counts["items__sub_items"] == 2 + assert table_counts["_dlt_loads"] == 1 + + # Load again with schema evolution. 
+ @dlt.resource(name="items", write_disposition="append") + def items2(): + yield { + "id": 1, + "name": "item", + "new_field": "hello", + "sub_items": [ + { + "id": 101, + "name": "sub item 101", + "other_new_field": "hello 101", + }, + { + "id": 101, + "name": "sub item 102", + "other_new_field": "hello 102", + }, + ], + } + + pipeline.run(items2) + table_counts = load_table_counts( + pipeline, *[t["name"] for t in pipeline.default_schema._schema_tables.values()] + ) + assert table_counts["items"] == 2 + assert table_counts["items__sub_items"] == 4 + assert table_counts["_dlt_loads"] == 2 diff --git a/tests/load/utils.py b/tests/load/utils.py index 7b4cf72b47..0574b719e7 100644 --- a/tests/load/utils.py +++ b/tests/load/utils.py @@ -137,7 +137,7 @@ def setup(self) -> None: def setup_pipeline( self, pipeline_name: str, dataset_name: str = None, full_refresh: bool = False, **kwargs ) -> dlt.Pipeline: - """Convenience method to setup pipeline with this configuration""" + """Convenience method to set up a pipeline with this configuration.""" self.setup() pipeline = dlt.pipeline( pipeline_name=pipeline_name, @@ -175,7 +175,7 @@ def destinations_configs( destination_configs += [ DestinationTestConfiguration(destination=destination) for destination in SQL_DESTINATIONS - if destination not in ("athena", "mssql", "synapse", "databricks") + if destination not in ("athena", "mssql", "synapse", "databricks", "clickhouse") ] destination_configs += [ DestinationTestConfiguration(destination="duckdb", file_format="parquet") @@ -208,6 +208,15 @@ def destinations_configs( extra_info="az-authorization", ) ] + destination_configs += [ + DestinationTestConfiguration( + destination="clickhouse", + file_format="jsonl", + bucket_url=AWS_BUCKET, + supports_merge=True, + supports_dbt=False, + ) + ] destination_configs += [ DestinationTestConfiguration(destination="mssql", supports_dbt=False), DestinationTestConfiguration(destination="synapse", supports_dbt=False), @@ -295,6 +304,13 @@ def destinations_configs( bucket_url=AWS_BUCKET, extra_info="s3-authorization", ), + DestinationTestConfiguration( + destination="clickhouse", + staging="filesystem", + file_format="jsonl", + bucket_url=AWS_BUCKET, + extra_info="s3-integration", + ), DestinationTestConfiguration( destination="synapse", staging="filesystem", @@ -327,6 +343,13 @@ def destinations_configs( bucket_url=AWS_BUCKET, extra_info="credential-forwarding", ), + DestinationTestConfiguration( + destination="clickhouse", + staging="filesystem", + file_format="jsonl", + bucket_url=AWS_BUCKET, + extra_info="credential-forwarding", + ), DestinationTestConfiguration( destination="bigquery", staging="filesystem", From ab50287e6de6593e97a0222c8a2e82b669294baf Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Wed, 20 Mar 2024 13:11:46 +0200 Subject: [PATCH 027/127] Format and Lint Signed-off-by: Marcel Coetzee --- .../custom_destination_bigquery.py | 5 ++++- .../code/custom_destination_bigquery-snippets.py | 1 - 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/docs/examples/custom_destination_bigquery/custom_destination_bigquery.py b/docs/examples/custom_destination_bigquery/custom_destination_bigquery.py index fcd964a980..624888f70a 100644 --- a/docs/examples/custom_destination_bigquery/custom_destination_bigquery.py +++ b/docs/examples/custom_destination_bigquery/custom_destination_bigquery.py @@ -15,6 +15,7 @@ # format: "your-project.your_dataset.your_table" BIGQUERY_TABLE_ID = "chat-analytics-rasa-ci.ci_streaming_insert.natural-disasters" + # dlt 
sources @dlt.resource(name="natural_disasters") def resource(url: str): @@ -38,6 +39,7 @@ def resource(url: str): ) yield table + # dlt biquery custom destination # we can use the dlt provided credentials class # to retrieve the gcp credentials from the secrets @@ -58,6 +60,7 @@ def bigquery_insert( load_job = client.load_table_from_file(f, BIGQUERY_TABLE_ID, job_config=job_config) load_job.result() # Waits for the job to complete. + if __name__ == "__main__": # run the pipeline and print load results pipeline = dlt.pipeline( @@ -68,4 +71,4 @@ def bigquery_insert( ) load_info = pipeline.run(resource(url=OWID_DISASTERS_URL)) - print(load_info) \ No newline at end of file + print(load_info) diff --git a/docs/website/docs/examples/custom_destination_bigquery/code/custom_destination_bigquery-snippets.py b/docs/website/docs/examples/custom_destination_bigquery/code/custom_destination_bigquery-snippets.py index 7d1d4a8dc2..16ff9c22b8 100644 --- a/docs/website/docs/examples/custom_destination_bigquery/code/custom_destination_bigquery-snippets.py +++ b/docs/website/docs/examples/custom_destination_bigquery/code/custom_destination_bigquery-snippets.py @@ -79,4 +79,3 @@ def bigquery_insert( print(load_info) # @@@DLT_SNIPPET_END example assert_load_info(load_info) - From 86685fd684bff29c8c016d9f20bfb4b5d9055e8d Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Sat, 23 Mar 2024 21:59:20 +0200 Subject: [PATCH 028/127] Improve DRYness #1055 Signed-off-by: Marcel Coetzee --- .../impl/clickhouse/clickhouse.py | 18 ++--- .../impl/clickhouse/sql_client.py | 53 ++++++++++++--- dlt/destinations/impl/clickhouse/utils.py | 17 +++-- dlt/helpers/streamlit_app/utils.py | 4 +- docs/examples/chess_production/chess.py | 12 ++-- docs/examples/connector_x_arrow/load_arrow.py | 2 + docs/examples/google_sheets/google_sheets.py | 5 +- docs/examples/incremental_loading/zendesk.py | 8 +-- docs/examples/nested_data/nested_data.py | 2 + .../pdf_to_weaviate/pdf_to_weaviate.py | 5 +- docs/examples/qdrant_zendesk/qdrant.py | 9 +-- docs/examples/transformers/pokemon.py | 4 +- docs/tools/fix_grammar_gpt.py | 5 +- tests/load/clickhouse/test_utls.py | 12 ++-- tests/load/pipeline/test_clickhouse.py | 6 +- tests/load/utils.py | 66 ++++++++++++------- 16 files changed, 149 insertions(+), 79 deletions(-) diff --git a/dlt/destinations/impl/clickhouse/clickhouse.py b/dlt/destinations/impl/clickhouse/clickhouse.py index 097224cb18..4524ff8d2b 100644 --- a/dlt/destinations/impl/clickhouse/clickhouse.py +++ b/dlt/destinations/impl/clickhouse/clickhouse.py @@ -1,4 +1,3 @@ -import logging import os from copy import deepcopy from typing import ClassVar, Optional, Dict, List, Sequence, cast @@ -30,7 +29,9 @@ from dlt.destinations.impl.clickhouse.sql_client import ClickhouseSqlClient from dlt.destinations.impl.clickhouse.utils import ( convert_storage_to_http_scheme, - render_s3_table_function, + render_object_storage_table_function, + FILE_FORMAT_TO_TABLE_FUNCTION_MAPPING, + SUPPORTED_FILE_FORMATS, ) from dlt.destinations.job_client_impl import ( SqlJobClientWithStaging, @@ -130,22 +131,22 @@ def __init__( bucket_url = urlparse(bucket_path) bucket_scheme = bucket_url.scheme + file_extension = cast(SUPPORTED_FILE_FORMATS, file_extension) table_function: str if bucket_scheme in ("s3", "gs", "gcs"): bucket_http_url = convert_storage_to_http_scheme(bucket_url) table_function = ( - render_s3_table_function( + render_object_storage_table_function( bucket_http_url, staging_credentials.aws_secret_access_key, staging_credentials.aws_secret_access_key, 
- file_format=file_extension, # type: ignore[arg-type] + file_format=file_extension, ) if isinstance(staging_credentials, AwsCredentialsWithoutDefaults) - else render_s3_table_function( - bucket_http_url, - file_format=file_extension, # type: ignore[arg-type] + else render_object_storage_table_function( + bucket_http_url, file_format=file_extension ) ) elif bucket_scheme in ("az", "abfs"): @@ -159,8 +160,7 @@ def __init__( container_name = bucket_url.netloc blobpath = bucket_url.path - format_mapping = {"jsonl": "JSONEachRow", "parquet": "Parquet"} - clickhouse_format = format_mapping[file_extension] + clickhouse_format = FILE_FORMAT_TO_TABLE_FUNCTION_MAPPING[file_extension] table_function = ( f"azureBlobStorage('{storage_account_url}','{container_name}','{ blobpath }','{ account_name }','{ account_key }','{ clickhouse_format}')" diff --git a/dlt/destinations/impl/clickhouse/sql_client.py b/dlt/destinations/impl/clickhouse/sql_client.py index 4b56725dc4..82c3abb776 100644 --- a/dlt/destinations/impl/clickhouse/sql_client.py +++ b/dlt/destinations/impl/clickhouse/sql_client.py @@ -6,12 +6,10 @@ Optional, Sequence, ClassVar, - Union, ) import clickhouse_driver # type: ignore[import-untyped] import clickhouse_driver.errors # type: ignore[import-untyped] -from clickhouse_driver.dbapi import Connection # type: ignore[import-untyped] from clickhouse_driver.dbapi.extras import DictCursor # type: ignore[import-untyped] from dlt.common.destination import DestinationCapabilitiesContext @@ -54,7 +52,15 @@ def __init__(self, dataset_name: str, credentials: ClickhouseCredentials) -> Non self.database_name = credentials.database def open_connection(self) -> clickhouse_driver.dbapi.connection.Connection: - self._conn = clickhouse_driver.connect(dsn=self.credentials.to_native_representation()) + self._conn = clickhouse_driver.dbapi.connect( + dsn=self.credentials.to_native_representation() + ) + with self._conn.cursor() as cur: + # Set session settings. There doesn't seem to be a way to set these + # without using the library's top-level, non-dbapi2 client. + cur.execute("set allow_experimental_object_type = 1") + cur.execute("set allow_experimental_lightweight_delete = 1") + return self._conn @raise_open_connection_error @@ -89,6 +95,29 @@ def execute_sql( with self.execute_query(sql, *args, **kwargs) as curr: return None if curr.description is None else curr.fetchall() + def create_dataset(self) -> None: + # Clickhouse doesn't have schemas. + pass + + def drop_dataset(self) -> None: + # Since Clickhouse doesn't have schemas, we need to drop all tables in our virtual schema, + # or collection of tables that has the `dataset_name` as a prefix. + to_drop_results = self.execute_sql( + """ + SELECT name + FROM system.tables + WHERE database = %(db_name)s + AND name LIKE %(dataset_name)s + """, + {"db_name": self.database_name, "dataset_name": self.dataset_name}, + ) + for to_drop_result in to_drop_results: + table = to_drop_result[0] + self.execute_sql( + "DROP TABLE %(database)s.%(table)s SYNC", + {"database": self.database_name, "table": table}, + ) + @contextmanager @raise_database_error def execute_query( @@ -97,7 +126,6 @@ def execute_query( cur: clickhouse_driver.dbapi.connection.Cursor with self._conn.cursor() as cur: try: - # TODO: Clickhouse driver only accepts pyformat `...WHERE name=%(name)s` parameter marker arguments. 
cur.execute(query, args or (kwargs or None)) yield ClickhouseDBApiCursorImpl(cur) # type: ignore[abstract] except clickhouse_driver.dbapi.Error: @@ -143,7 +171,7 @@ def _make_database_exception(cls, ex: Exception) -> Exception: # type: ignore[r clickhouse_driver.dbapi.errors.InternalError, ), ): - if term := cls._maybe_make_terminal_exception_from_data_error(ex): + if term := cls._maybe_make_terminal_exception_from_data_error(): return term else: return DatabaseTransientException(ex) @@ -161,12 +189,17 @@ def _make_database_exception(cls, ex: Exception) -> Exception: # type: ignore[r else: return ex + def has_dataset(self) -> bool: + query = """ + SELECT 1 FROM INFORMATION_SCHEMA.SCHEMATA WHERE + catalog_name = %(database)s AND schema_name = %(table)s + """ + database, table = self.fully_qualified_dataset_name(escape=False).split(".", 2) + rows = self.execute_sql(query, {"database": database, "table": table}) + return len(rows) > 0 + @staticmethod - def _maybe_make_terminal_exception_from_data_error( - ex: Union[ - clickhouse_driver.dbapi.errors.DataError, clickhouse_driver.dbapi.errors.InternalError - ] - ) -> Optional[Exception]: + def _maybe_make_terminal_exception_from_data_error() -> Optional[Exception]: return None @staticmethod diff --git a/dlt/destinations/impl/clickhouse/utils.py b/dlt/destinations/impl/clickhouse/utils.py index 6297712943..8ab67d6522 100644 --- a/dlt/destinations/impl/clickhouse/utils.py +++ b/dlt/destinations/impl/clickhouse/utils.py @@ -1,10 +1,14 @@ -from typing import Union, Optional, Literal -from urllib.parse import urlparse, ParseResult, urlunparse +from typing import Union, Optional, Literal, Dict +from urllib.parse import urlparse, ParseResult from jinja2 import Template -S3_TABLE_FUNCTION_FILE_FORMATS = Literal["jsonl", "parquet"] +SUPPORTED_FILE_FORMATS = Literal["jsonl", "parquet"] +FILE_FORMAT_TO_TABLE_FUNCTION_MAPPING: Dict[SUPPORTED_FILE_FORMATS, str] = { + "jsonl": "JSONEachRow", + "parquet": "Parquet", +} def convert_storage_to_http_scheme( @@ -40,17 +44,16 @@ def convert_storage_to_http_scheme( raise Exception(f"Error converting storage URL to HTTP protocol: '{url}'") from e -def render_s3_table_function( +def render_object_storage_table_function( url: str, access_key_id: Optional[str] = None, secret_access_key: Optional[str] = None, - file_format: Optional[S3_TABLE_FUNCTION_FILE_FORMATS] = "jsonl", + file_format: SUPPORTED_FILE_FORMATS = "jsonl", ) -> str: if file_format not in ["parquet", "jsonl"]: raise ValueError("Clickhouse s3/gcs staging only supports 'parquet' and 'jsonl'.") - format_mapping = {"jsonl": "JSONEachRow", "parquet": "Parquet"} - clickhouse_format = format_mapping[file_format] + clickhouse_format = FILE_FORMAT_TO_TABLE_FUNCTION_MAPPING[file_format] template = Template( """s3('{{ url }}'{% if access_key_id and secret_access_key %},'{{ access_key_id }}','{{ secret_access_key }}'{% else %},NOSIGN{% endif %},'{{ clickhouse_format }}')""" diff --git a/dlt/helpers/streamlit_app/utils.py b/dlt/helpers/streamlit_app/utils.py index 6b2dab495c..cf1728c33b 100644 --- a/dlt/helpers/streamlit_app/utils.py +++ b/dlt/helpers/streamlit_app/utils.py @@ -38,9 +38,7 @@ def render_with_pipeline(render_func: Callable[..., None]) -> None: render_func(pipeline) -def query_using_cache( - pipeline: dlt.Pipeline, ttl: int -) -> Callable[..., Optional[pd.DataFrame]]: +def query_using_cache(pipeline: dlt.Pipeline, ttl: int) -> Callable[..., Optional[pd.DataFrame]]: @st.cache_data(ttl=ttl) def do_query( # type: ignore[return] query: str, diff --git 
a/docs/examples/chess_production/chess.py b/docs/examples/chess_production/chess.py index d9e138187f..e2d0b9c10d 100644 --- a/docs/examples/chess_production/chess.py +++ b/docs/examples/chess_production/chess.py @@ -6,6 +6,7 @@ from dlt.common.typing import StrAny, TDataItems from dlt.sources.helpers.requests import client + @dlt.source def chess( chess_url: str = dlt.config.value, @@ -56,6 +57,7 @@ def players_games(username: Any) -> Iterator[TDataItems]: MAX_PLAYERS = 5 + def load_data_with_retry(pipeline, data): try: for attempt in Retrying( @@ -65,9 +67,7 @@ def load_data_with_retry(pipeline, data): reraise=True, ): with attempt: - logger.info( - f"Running the pipeline, attempt={attempt.retry_state.attempt_number}" - ) + logger.info(f"Running the pipeline, attempt={attempt.retry_state.attempt_number}") load_info = pipeline.run(data) logger.info(str(load_info)) @@ -89,9 +89,7 @@ def load_data_with_retry(pipeline, data): # print the information on the first load package and all jobs inside logger.info(f"First load package info: {load_info.load_packages[0]}") # print the information on the first completed job in first load package - logger.info( - f"First completed job info: {load_info.load_packages[0].jobs['completed_jobs'][0]}" - ) + logger.info(f"First completed job info: {load_info.load_packages[0].jobs['completed_jobs'][0]}") # check for schema updates: schema_updates = [p.schema_update for p in load_info.load_packages] @@ -149,4 +147,4 @@ def load_data_with_retry(pipeline, data): ) # get data for a few famous players data = chess(chess_url="https://api.chess.com/pub/", max_players=MAX_PLAYERS) - load_data_with_retry(pipeline, data) \ No newline at end of file + load_data_with_retry(pipeline, data) diff --git a/docs/examples/connector_x_arrow/load_arrow.py b/docs/examples/connector_x_arrow/load_arrow.py index 06ca4e17b3..b3c654cef9 100644 --- a/docs/examples/connector_x_arrow/load_arrow.py +++ b/docs/examples/connector_x_arrow/load_arrow.py @@ -3,6 +3,7 @@ import dlt from dlt.sources.credentials import ConnectionStringCredentials + def read_sql_x( conn_str: ConnectionStringCredentials = dlt.secrets.value, query: str = dlt.config.value, @@ -14,6 +15,7 @@ def read_sql_x( protocol="binary", ) + def genome_resource(): # create genome resource with merge on `upid` primary key genome = dlt.resource( diff --git a/docs/examples/google_sheets/google_sheets.py b/docs/examples/google_sheets/google_sheets.py index 8a93df9970..1ba330e4ca 100644 --- a/docs/examples/google_sheets/google_sheets.py +++ b/docs/examples/google_sheets/google_sheets.py @@ -9,6 +9,7 @@ ) from dlt.common.typing import DictStrAny, StrAny + def _initialize_sheets( credentials: Union[GcpOAuthCredentials, GcpServiceAccountCredentials] ) -> Any: @@ -16,6 +17,7 @@ def _initialize_sheets( service = build("sheets", "v4", credentials=credentials.to_native_credentials()) return service + @dlt.source def google_spreadsheet( spreadsheet_id: str, @@ -55,6 +57,7 @@ def get_sheet(sheet_name: str) -> Iterator[DictStrAny]: for name in sheet_names ] + if __name__ == "__main__": pipeline = dlt.pipeline(destination="duckdb") # see example.secrets.toml to where to put credentials @@ -67,4 +70,4 @@ def get_sheet(sheet_name: str) -> Iterator[DictStrAny]: sheet_names=range_names, ) ) - print(info) \ No newline at end of file + print(info) diff --git a/docs/examples/incremental_loading/zendesk.py b/docs/examples/incremental_loading/zendesk.py index 4b8597886a..6113f98793 100644 --- a/docs/examples/incremental_loading/zendesk.py +++ 
b/docs/examples/incremental_loading/zendesk.py @@ -6,12 +6,11 @@ from dlt.common.typing import TAnyDateTime from dlt.sources.helpers.requests import client + @dlt.source(max_table_nesting=2) def zendesk_support( credentials: Dict[str, str] = dlt.secrets.value, - start_date: Optional[TAnyDateTime] = pendulum.datetime( # noqa: B008 - year=2000, month=1, day=1 - ), + start_date: Optional[TAnyDateTime] = pendulum.datetime(year=2000, month=1, day=1), # noqa: B008 end_date: Optional[TAnyDateTime] = None, ): """ @@ -113,6 +112,7 @@ def get_pages( if not response_json["end_of_stream"]: get_url = response_json["next_page"] + if __name__ == "__main__": # create dlt pipeline pipeline = dlt.pipeline( @@ -120,4 +120,4 @@ def get_pages( ) load_info = pipeline.run(zendesk_support()) - print(load_info) \ No newline at end of file + print(load_info) diff --git a/docs/examples/nested_data/nested_data.py b/docs/examples/nested_data/nested_data.py index 3464448de6..7f85f0522e 100644 --- a/docs/examples/nested_data/nested_data.py +++ b/docs/examples/nested_data/nested_data.py @@ -13,6 +13,7 @@ CHUNK_SIZE = 10000 + # You can limit how deep dlt goes when generating child tables. # By default, the library will descend and generate child tables # for all nested lists, without a limit. @@ -81,6 +82,7 @@ def load_documents(self) -> Iterator[TDataItem]: while docs_slice := list(islice(cursor, CHUNK_SIZE)): yield map_nested_in_place(convert_mongo_objs, docs_slice) + def convert_mongo_objs(value: Any) -> Any: if isinstance(value, (ObjectId, Decimal128)): return str(value) diff --git a/docs/examples/pdf_to_weaviate/pdf_to_weaviate.py b/docs/examples/pdf_to_weaviate/pdf_to_weaviate.py index 8f7833e7d7..e7f57853ed 100644 --- a/docs/examples/pdf_to_weaviate/pdf_to_weaviate.py +++ b/docs/examples/pdf_to_weaviate/pdf_to_weaviate.py @@ -4,6 +4,7 @@ from dlt.destinations.impl.weaviate import weaviate_adapter from PyPDF2 import PdfReader + @dlt.resource(selected=False) def list_files(folder_path: str): folder_path = os.path.abspath(folder_path) @@ -15,6 +16,7 @@ def list_files(folder_path: str): "mtime": os.path.getmtime(file_path), } + @dlt.transformer(primary_key="page_id", write_disposition="merge") def pdf_to_text(file_item, separate_pages: bool = False): if not separate_pages: @@ -28,6 +30,7 @@ def pdf_to_text(file_item, separate_pages: bool = False): page_item["page_id"] = file_item["file_name"] + "_" + str(page_no) yield page_item + pipeline = dlt.pipeline(pipeline_name="pdf_to_text", destination="weaviate") # this constructs a simple pipeline that: (1) reads files from "invoices" folder (2) filters only those ending with ".pdf" @@ -51,4 +54,4 @@ def pdf_to_text(file_item, separate_pages: bool = False): client = weaviate.Client("http://localhost:8080") # get text of all the invoices in InvoiceText class we just created above -print(client.query.get("InvoiceText", ["text", "file_name", "mtime", "page_id"]).do()) \ No newline at end of file +print(client.query.get("InvoiceText", ["text", "file_name", "mtime", "page_id"]).do()) diff --git a/docs/examples/qdrant_zendesk/qdrant.py b/docs/examples/qdrant_zendesk/qdrant.py index 300d8dc6ad..bd0cbafc99 100644 --- a/docs/examples/qdrant_zendesk/qdrant.py +++ b/docs/examples/qdrant_zendesk/qdrant.py @@ -10,13 +10,12 @@ from dlt.common.configuration.inject import with_config + # function from: https://github.com/dlt-hub/verified-sources/tree/master/sources/zendesk @dlt.source(max_table_nesting=2) def zendesk_support( credentials: Dict[str, str] = dlt.secrets.value, - start_date: 
Optional[TAnyDateTime] = pendulum.datetime( # noqa: B008 - year=2000, month=1, day=1 - ), + start_date: Optional[TAnyDateTime] = pendulum.datetime(year=2000, month=1, day=1), # noqa: B008 end_date: Optional[TAnyDateTime] = None, ): """ @@ -80,6 +79,7 @@ def _parse_date_or_none(value: Optional[str]) -> Optional[pendulum.DateTime]: return None return ensure_pendulum_datetime(value) + # modify dates to return datetime objects instead def _fix_date(ticket): ticket["updated_at"] = _parse_date_or_none(ticket["updated_at"]) @@ -87,6 +87,7 @@ def _fix_date(ticket): ticket["due_at"] = _parse_date_or_none(ticket["due_at"]) return ticket + # function from: https://github.com/dlt-hub/verified-sources/tree/master/sources/zendesk def get_pages( url: str, @@ -127,6 +128,7 @@ def get_pages( if not response_json["end_of_stream"]: get_url = response_json["next_page"] + if __name__ == "__main__": # create a pipeline with an appropriate name pipeline = dlt.pipeline( @@ -146,7 +148,6 @@ def get_pages( print(load_info) - # running the Qdrant client to connect to your Qdrant database @with_config(sections=("destination", "qdrant", "credentials")) diff --git a/docs/examples/transformers/pokemon.py b/docs/examples/transformers/pokemon.py index 2181c33259..ca32c570ef 100644 --- a/docs/examples/transformers/pokemon.py +++ b/docs/examples/transformers/pokemon.py @@ -1,6 +1,7 @@ import dlt from dlt.sources.helpers import requests + @dlt.source(max_table_nesting=2) def source(pokemon_api_url: str): """""" @@ -44,6 +45,7 @@ def species(pokemon_details): return (pokemon_list | pokemon, pokemon_list | pokemon | species) + if __name__ == "__main__": # build duck db pipeline pipeline = dlt.pipeline( @@ -52,4 +54,4 @@ def species(pokemon_details): # the pokemon_list resource does not need to be loaded load_info = pipeline.run(source("https://pokeapi.co/api/v2/pokemon")) - print(load_info) \ No newline at end of file + print(load_info) diff --git a/docs/tools/fix_grammar_gpt.py b/docs/tools/fix_grammar_gpt.py index 1e4cf748dd..051448a2d4 100644 --- a/docs/tools/fix_grammar_gpt.py +++ b/docs/tools/fix_grammar_gpt.py @@ -41,7 +41,10 @@ parser.add_argument( "-f", "--files", - help="Specify the file name. Grammar Checker will filter all .md files containing this string in the filepath.", + help=( + "Specify the file name. Grammar Checker will filter all .md files containing this" + " string in the filepath." 
+ ), type=str, ) diff --git a/tests/load/clickhouse/test_utls.py b/tests/load/clickhouse/test_utls.py index 9c9c862623..91cb5b7ec7 100644 --- a/tests/load/clickhouse/test_utls.py +++ b/tests/load/clickhouse/test_utls.py @@ -2,7 +2,7 @@ from dlt.destinations.impl.clickhouse.utils import ( convert_storage_to_http_scheme, - render_s3_table_function, + render_object_storage_table_function, ) @@ -106,7 +106,7 @@ def test_render_with_credentials_jsonl() -> None: """s3('https://example.com/data.jsonl','test_access_key','test_secret_key','JSONEachRow')""" ) assert ( - render_s3_table_function(url, access_key_id, secret_access_key, file_format) # type: ignore[arg-type] + render_object_storage_table_function(url, access_key_id, secret_access_key, file_format) # type: ignore[arg-type] == expected_output ) @@ -120,7 +120,7 @@ def test_render_with_credentials_parquet() -> None: """s3('https://example.com/data.parquet','test_access_key','test_secret_key','Parquet')""" ) assert ( - render_s3_table_function(url, access_key_id, secret_access_key, file_format) # type: ignore[arg-type] + render_object_storage_table_function(url, access_key_id, secret_access_key, file_format) # type: ignore[arg-type] == expected_output ) @@ -129,7 +129,7 @@ def test_render_without_credentials() -> None: url = "https://example.com/data.jsonl" file_format = "jsonl" expected_output = """s3('https://example.com/data.jsonl',NOSIGN,'JSONEachRow')""" - assert render_s3_table_function(url, file_format=file_format) == expected_output # type: ignore[arg-type] + assert render_object_storage_table_function(url, file_format=file_format) == expected_output # type: ignore[arg-type] def test_render_invalid_file_format() -> None: @@ -138,7 +138,7 @@ def test_render_invalid_file_format() -> None: secret_access_key = "test_secret_key" file_format = "unknown" with pytest.raises(ValueError) as excinfo: - render_s3_table_function(url, access_key_id, secret_access_key, file_format) # type: ignore[arg-type] + render_object_storage_table_function(url, access_key_id, secret_access_key, file_format) # type: ignore[arg-type] assert "Clickhouse s3/gcs staging only supports 'parquet' and 'jsonl'." 
== str(excinfo.value) @@ -150,5 +150,5 @@ def test_invalid_url_format() -> None: def test_render_missing_url() -> None: with pytest.raises(TypeError) as excinfo: - render_s3_table_function() # type: ignore + render_object_storage_table_function() # type: ignore assert "missing 1 required positional argument: 'url'" in str(excinfo.value) diff --git a/tests/load/pipeline/test_clickhouse.py b/tests/load/pipeline/test_clickhouse.py index 79fcea53da..e48f45be13 100644 --- a/tests/load/pipeline/test_clickhouse.py +++ b/tests/load/pipeline/test_clickhouse.py @@ -8,7 +8,7 @@ @pytest.mark.parametrize( "destination_config", - destinations_configs(default_staging_configs=True, subset=["clickhouse"]), + destinations_configs(all_staging_configs=True, subset=["clickhouse"]), ids=lambda x: x.name, ) def test_clickhouse_destinations(destination_config: DestinationTestConfiguration) -> None: @@ -22,7 +22,9 @@ def items(): "sub_items": [{"id": 101, "name": "sub item 101"}, {"id": 101, "name": "sub item 102"}], } - pipeline.run(items, loader_file_format=destination_config.file_format) + pipeline.run( + items, loader_file_format=destination_config.file_format, staging=destination_config.staging + ) table_counts = load_table_counts( pipeline, *[t["name"] for t in pipeline.default_schema._schema_tables.values()] diff --git a/tests/load/utils.py b/tests/load/utils.py index 0574b719e7..18d4234bdd 100644 --- a/tests/load/utils.py +++ b/tests/load/utils.py @@ -45,7 +45,7 @@ assert_all_data_types_row, ) -# bucket urls +# Bucket urls. AWS_BUCKET = dlt.config.get("tests.bucket_url_s3", str) GCS_BUCKET = dlt.config.get("tests.bucket_url_gs", str) AZ_BUCKET = dlt.config.get("tests.bucket_url_az", str) @@ -64,7 +64,7 @@ "r2", ] -# Filter out buckets not in all filesystem drivers +# Filter out buckets not in all filesystem drivers. DEFAULT_BUCKETS = [GCS_BUCKET, AWS_BUCKET, FILE_BUCKET, MEMORY_BUCKET, AZ_BUCKET] DEFAULT_BUCKETS = [ bucket for bucket in DEFAULT_BUCKETS if bucket.split(":")[0] in ALL_FILESYSTEM_DRIVERS @@ -114,10 +114,7 @@ def name(self) -> str: name: str = self.destination if self.file_format: name += f"-{self.file_format}" - if not self.staging: - name += "-no-staging" - else: - name += "-staging" + name += "-staging" if self.staging else "-no-staging" if self.extra_info: name += f"-{self.extra_info}" return name @@ -180,7 +177,7 @@ def destinations_configs( destination_configs += [ DestinationTestConfiguration(destination="duckdb", file_format="parquet") ] - # athena needs filesystem staging, which will be automatically set, we have to supply a bucket url though + # Athena needs filesystem staging, which will be automatically set; we have to supply a bucket url though. 
destination_configs += [ DestinationTestConfiguration( destination="athena", @@ -208,15 +205,6 @@ def destinations_configs( extra_info="az-authorization", ) ] - destination_configs += [ - DestinationTestConfiguration( - destination="clickhouse", - file_format="jsonl", - bucket_url=AWS_BUCKET, - supports_merge=True, - supports_dbt=False, - ) - ] destination_configs += [ DestinationTestConfiguration(destination="mssql", supports_dbt=False), DestinationTestConfiguration(destination="synapse", supports_dbt=False), @@ -304,15 +292,50 @@ def destinations_configs( bucket_url=AWS_BUCKET, extra_info="s3-authorization", ), + DestinationTestConfiguration( + destination="synapse", + staging="filesystem", + file_format="parquet", + bucket_url=AZ_BUCKET, + extra_info="az-authorization", + ), + DestinationTestConfiguration( + destination="clickhouse", + staging="filesystem", + file_format="jsonl", + bucket_url=GCS_BUCKET, + extra_info="gcs-authorization", + ), DestinationTestConfiguration( destination="clickhouse", staging="filesystem", file_format="jsonl", bucket_url=AWS_BUCKET, - extra_info="s3-integration", + extra_info="s3-authorization", ), DestinationTestConfiguration( - destination="synapse", + destination="clickhouse", + staging="filesystem", + file_format="jsonl", + bucket_url=AZ_BUCKET, + extra_info="az-authorization", + ), + DestinationTestConfiguration( + destination="clickhouse", + staging="filesystem", + file_format="parquet", + bucket_url=GCS_BUCKET, + extra_info="gcs-authorization", + ), + DestinationTestConfiguration( + destination="clickhouse", + staging="filesystem", + file_format="parquet", + bucket_url=AWS_BUCKET, + extra_info="s3-authorization", + ), + DestinationTestConfiguration( + destination="clickhouse", staging="filesystem", file_format="parquet", bucket_url=AZ_BUCKET, @@ -485,10 +508,7 @@ def prepare_table( client.schema.bump_version() client.update_stored_schema() user_table = load_table(case_name)[table_name] - if make_uniq_table: - user_table_name = table_name + uniq_id() - else: - user_table_name = table_name + user_table_name = table_name + uniq_id() if make_uniq_table else table_name client.schema.update_table(new_table(user_table_name, columns=list(user_table.values()))) client.schema.bump_version() client.update_stored_schema() @@ -561,7 +581,7 @@ def yield_client_with_storage( destination_type: str, default_config_values: StrAny = None, schema_name: str = "event" ) -> Iterator[SqlJobClientBase]: # create dataset with random name - dataset_name = "test_" + uniq_id() + dataset_name = f"test_{uniq_id()}" with cm_yield_client( destination_type, dataset_name, default_config_values, schema_name From e702d8abd18df53f42e1a24dffec6fb42d88e886 Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Sat, 23 Mar 2024 22:33:45 +0200 Subject: [PATCH 029/127] Remove old comment #1055 Signed-off-by: Marcel Coetzee --- dlt/destinations/impl/clickhouse/clickhouse.py | 1 - 1 file changed, 1 deletion(-) diff --git a/dlt/destinations/impl/clickhouse/clickhouse.py b/dlt/destinations/impl/clickhouse/clickhouse.py index 4524ff8d2b..dcbbd4987e 100644 --- a/dlt/destinations/impl/clickhouse/clickhouse.py +++ b/dlt/destinations/impl/clickhouse/clickhouse.py @@ -192,7 +192,6 @@ def __init__( schema: Schema, config: ClickhouseClientConfiguration, ) -> None: - # TODO: There are no schemas in Clickhouse. No point in having schemas, only dataset names and table names for example "dataset1_mytable". 
self.sql_client: ClickhouseSqlClient = ClickhouseSqlClient( config.normalize_dataset_name(schema), config.credentials ) From 83b38121b6081afd98d96264a2091a612df559c6 Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Sat, 23 Mar 2024 23:27:45 +0200 Subject: [PATCH 030/127] Update pyproject.toml #1055 Signed-off-by: Marcel Coetzee --- poetry.lock | 7 ++++--- pyproject.toml | 11 ++++++----- 2 files changed, 10 insertions(+), 8 deletions(-) diff --git a/poetry.lock b/poetry.lock index 6a724d655a..094345d590 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1674,7 +1674,7 @@ PyYAML = ">=3.11" name = "clickhouse-driver" version = "0.2.7" description = "Python driver with native interface for ClickHouse" -optional = false +optional = true python-versions = ">=3.7, <4" files = [ {file = "clickhouse-driver-0.2.7.tar.gz", hash = "sha256:299cfbe6d561955d88eeab6e09f3de31e2f6daccc6fdd904a59e46357d2d28d9"}, @@ -8646,7 +8646,7 @@ files = [ name = "tzlocal" version = "5.2" description = "tzinfo object for the local timezone" -optional = false +optional = true python-versions = ">=3.8" files = [ {file = "tzlocal-5.2-py3-none-any.whl", hash = "sha256:49816ef2fe65ea8ac19d19aa7a1ae0551c834303d5014c6d5a62e4cbda8047b8"}, @@ -9085,6 +9085,7 @@ athena = ["botocore", "pyarrow", "pyathena", "s3fs"] az = ["adlfs"] bigquery = ["gcsfs", "google-cloud-bigquery", "grpcio", "pyarrow"] cli = ["cron-descriptor", "pipdeptree"] +clickhouse = ["clickhouse-driver"] databricks = ["databricks-sql-connector"] dbt = ["dbt-athena-community", "dbt-bigquery", "dbt-core", "dbt-databricks", "dbt-duckdb", "dbt-redshift", "dbt-snowflake"] duckdb = ["duckdb", "duckdb"] @@ -9105,4 +9106,4 @@ weaviate = ["weaviate-client"] [metadata] lock-version = "2.0" python-versions = ">=3.8.1,<3.13" -content-hash = "1383f10b512c27571168aa003fe72f18b06d30be31924083bee8834742e95ad2" +content-hash = "3059208353b25cbd14865a8b59e8b1cb5aacfc988b60fb950051debe53abaf65" diff --git a/pyproject.toml b/pyproject.toml index 86599a36f4..c02d80d182 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -80,6 +80,7 @@ pyodbc = {version = "^4.0.39", optional = true} qdrant-client = {version = "^1.6.4", optional = true, extras = ["fastembed"]} databricks-sql-connector = {version = ">=2.9.3,<3.0.0", optional = true} dbt-databricks = {version = "^1.7.3", optional = true} +clickhouse-driver = { version = "^0.2.7", optional = true } [tool.poetry.extras] dbt = ["dbt-core", "dbt-redshift", "dbt-bigquery", "dbt-duckdb", "dbt-snowflake", "dbt-athena-community", "dbt-databricks"] @@ -103,6 +104,7 @@ mssql = ["pyodbc"] synapse = ["pyodbc", "adlfs", "pyarrow"] qdrant = ["qdrant-client"] databricks = ["databricks-sql-connector"] +clickhouse = ["clickhouse-driver"] [tool.poetry.scripts] dlt = "dlt.cli._dlt:_main" @@ -145,10 +147,9 @@ google-api-python-client = ">=1.7.11" pytest-asyncio = "^0.23.5" types-sqlalchemy = "^1.4.53.38" ruff = "^0.3.2" -clickhouse-driver = "^0.2.7" [tool.poetry.group.pipeline] -optional=true +optional = true [tool.poetry.group.pipeline.dependencies] google-auth-oauthlib = "^1.0.0" @@ -191,9 +192,9 @@ SQLAlchemy = ">=1.4.0" pymysql = "^1.1.0" pypdf2 = "^3.0.1" pydoc-markdown = "^4.8.2" -connectorx="0.3.2" -dbt-core=">=1.2.0" -dbt-duckdb=">=1.2.0" +connectorx = "0.3.2" +dbt-core = ">=1.2.0" +dbt-duckdb = ">=1.2.0" pymongo = ">=4.3.3" pandas = ">2" From d26183b370ee0842e978d59ee952fe77dfcfb7ad Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Mon, 25 Mar 2024 21:46:53 +0200 Subject: [PATCH 031/127] Fix secure connection settings #1055 Signed-off-by: Marcel 
Coetzee --- dlt/destinations/impl/clickhouse/configuration.py | 12 +++++++++++- .../load/clickhouse/test_clickhouse_configuration.py | 9 ++++++--- 2 files changed, 17 insertions(+), 4 deletions(-) diff --git a/dlt/destinations/impl/clickhouse/configuration.py b/dlt/destinations/impl/clickhouse/configuration.py index fd5b3276c0..7ef4f35bc8 100644 --- a/dlt/destinations/impl/clickhouse/configuration.py +++ b/dlt/destinations/impl/clickhouse/configuration.py @@ -1,4 +1,5 @@ -from typing import ClassVar, List, Any, Final, TYPE_CHECKING, Optional +import logging +from typing import ClassVar, List, Any, Final, TYPE_CHECKING, Literal from dlt.common.configuration import configspec from dlt.common.configuration.specs import ConnectionStringCredentials @@ -9,6 +10,9 @@ from dlt.common.utils import digest128 +TSecureConnection = Literal[0, 1] + + @configspec class ClickhouseCredentials(ConnectionStringCredentials): drivername: str = "clickhouse" @@ -24,6 +28,8 @@ class ClickhouseCredentials(ConnectionStringCredentials): """Timeout for establishing connection. Defaults to 10 seconds.""" send_receive_timeout: int = 300 """Timeout for sending and receiving data. Defaults to 300 seconds.""" + secure: TSecureConnection = 1 + """Enables TLS encryption when connecting to ClickHouse Server. 0 means no encryption, 1 means encrypted.""" __config_gen_annotations__: ClassVar[List[str]] = [ "host", @@ -32,6 +38,7 @@ class ClickhouseCredentials(ConnectionStringCredentials): "database", "connect_timeout", "send_receive_timeout", + "secure", ] def parse_native_representation(self, native_value: Any) -> None: @@ -40,6 +47,7 @@ def parse_native_representation(self, native_value: Any) -> None: self.send_receive_timeout = int( self.query.get("send_receive_timeout", self.send_receive_timeout) ) + self.secure = int(self.query.get("secure", self.secure)) # type: ignore[assignment] if not self.is_partial(): self.resolve() @@ -49,8 +57,10 @@ def to_url(self) -> URL: [ ("connect_timeout", str(self.connect_timeout)), ("send_receive_timeout", str(self.send_receive_timeout)), + ("secure", str(1) if self.secure else str(0)), ] ) + logging.info(url) return url diff --git a/tests/load/clickhouse/test_clickhouse_configuration.py b/tests/load/clickhouse/test_clickhouse_configuration.py index 3406aad902..b0710224d9 100644 --- a/tests/load/clickhouse/test_clickhouse_configuration.py +++ b/tests/load/clickhouse/test_clickhouse_configuration.py @@ -12,20 +12,23 @@ def test_connection_string_with_all_params() -> None: - url = "clickhouse://user1:pass1@host1:9000/db1" + url = "clickhouse://user1:pass1@host1:9000/testdb?secure=0&connect_timeout=230&send_receive_timeout=1000" creds = ClickhouseCredentials() creds.parse_native_representation(url) - assert creds.database == "db1" + assert creds.database == "testdb" assert creds.username == "user1" assert creds.password == "pass1" assert creds.host == "host1" assert creds.port == 9000 + assert creds.secure == 0 + assert creds.connect_timeout == 230 + assert creds.send_receive_timeout == 1000 expected = make_url(url) - # Test URL components regardless of query param order + # Test URL components regardless of query param order. 
assert make_url(creds.to_native_representation()) == expected From e596ae3eebdc4989d4a0fce83e6b10070c2396c8 Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Mon, 25 Mar 2024 22:15:12 +0200 Subject: [PATCH 032/127] Minor config parsing amendments #1055 Signed-off-by: Marcel Coetzee --- dlt/destinations/impl/clickhouse/configuration.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/dlt/destinations/impl/clickhouse/configuration.py b/dlt/destinations/impl/clickhouse/configuration.py index 7ef4f35bc8..0689d70be5 100644 --- a/dlt/destinations/impl/clickhouse/configuration.py +++ b/dlt/destinations/impl/clickhouse/configuration.py @@ -1,5 +1,5 @@ import logging -from typing import ClassVar, List, Any, Final, TYPE_CHECKING, Literal +from typing import ClassVar, List, Any, Final, TYPE_CHECKING, Literal, cast from dlt.common.configuration import configspec from dlt.common.configuration.specs import ConnectionStringCredentials @@ -20,34 +20,35 @@ class ClickhouseCredentials(ConnectionStringCredentials): """Host with running ClickHouse server.""" port: int = 9000 """Port ClickHouse server is bound to. Defaults to 9000.""" - user: str = "default" + username: str = "default" """Database user. Defaults to 'default'.""" database: str = "default" """database connect to. Defaults to 'default'.""" + secure: TSecureConnection = 1 + """Enables TLS encryption when connecting to ClickHouse Server. 0 means no encryption, 1 means encrypted.""" connect_timeout: int = 10 """Timeout for establishing connection. Defaults to 10 seconds.""" send_receive_timeout: int = 300 """Timeout for sending and receiving data. Defaults to 300 seconds.""" - secure: TSecureConnection = 1 - """Enables TLS encryption when connecting to ClickHouse Server. 0 means no encryption, 1 means encrypted.""" __config_gen_annotations__: ClassVar[List[str]] = [ "host", "port", - "user", + "username", "database", + "secure", "connect_timeout", "send_receive_timeout", - "secure", ] + def parse_native_representation(self, native_value: Any) -> None: super().parse_native_representation(native_value) self.connect_timeout = int(self.query.get("connect_timeout", self.connect_timeout)) self.send_receive_timeout = int( self.query.get("send_receive_timeout", self.send_receive_timeout) ) - self.secure = int(self.query.get("secure", self.secure)) # type: ignore[assignment] + self.secure = cast(TSecureConnection, int(self.query.get("secure", self.secure))) if not self.is_partial(): self.resolve() From e970a31cf7c8ad0212ea0349641cf1333967fa53 Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Mon, 25 Mar 2024 23:05:41 +0200 Subject: [PATCH 033/127] Fix ssl connection (correct port) #1055 Signed-off-by: Marcel Coetzee --- dlt/cli/config_toml_writer.py | 22 +++++++------- dlt/common/libs/sql_alchemy.py | 29 +++++++++---------- .../impl/clickhouse/configuration.py | 7 ++--- 3 files changed, 25 insertions(+), 33 deletions(-) diff --git a/dlt/cli/config_toml_writer.py b/dlt/cli/config_toml_writer.py index 8cf831d725..97a23fa0ef 100644 --- a/dlt/cli/config_toml_writer.py +++ b/dlt/cli/config_toml_writer.py @@ -36,10 +36,9 @@ def generate_typed_example(name: str, hint: AnyType) -> Any: if sc_type == "complex": if issubclass(inner_hint, C_Sequence): return ["a", "b", "c"] - else: - table = tomlkit.table(False) - table["key"] = "value" - return table + table = tomlkit.table(False) + table["key"] = "value" + return table if sc_type == "timestamp": return pendulum.now().to_iso8601_string() if sc_type == "date": @@ -74,15 +73,14 @@ 
def write_value( write_spec(inner_table, hint(), overwrite_existing) if len(inner_table) > 0: toml_table[name] = inner_table + elif default_value is None: + example_value = generate_typed_example(name, hint) + toml_table[name] = example_value + # tomlkit not supporting comments on boolean + if not isinstance(example_value, bool): + toml_table[name].comment("please set me up!") else: - if default_value is None: - example_value = generate_typed_example(name, hint) - toml_table[name] = example_value - # tomlkit not supporting comments on boolean - if not isinstance(example_value, bool): - toml_table[name].comment("please set me up!") - else: - toml_table[name] = default_value + toml_table[name] = default_value def write_spec(toml_table: TOMLTable, config: BaseConfiguration, overwrite_existing: bool) -> None: diff --git a/dlt/common/libs/sql_alchemy.py b/dlt/common/libs/sql_alchemy.py index 2f3b51ec0d..a8797d1cb5 100644 --- a/dlt/common/libs/sql_alchemy.py +++ b/dlt/common/libs/sql_alchemy.py @@ -117,21 +117,18 @@ def _assert_port(cls, port: Optional[int]) -> Optional[int]: return None try: return int(port) - except TypeError: - raise TypeError("Port argument must be an integer or None") + except TypeError as e: + raise TypeError("Port argument must be an integer or None") from e @classmethod def _assert_str(cls, v: str, paramname: str) -> str: if not isinstance(v, str): - raise TypeError("%s must be a string" % paramname) + raise TypeError(f"{paramname} must be a string") return v @classmethod def _assert_none_str(cls, v: Optional[str], paramname: str) -> Optional[str]: - if v is None: - return v - - return cls._assert_str(v, paramname) + return v if v is None else cls._assert_str(v, paramname) @classmethod def _str_dict( @@ -254,14 +251,14 @@ def update_query_pairs( new_query: Mapping[str, Union[str, Sequence[str]]] if append: - new_query = {} - - for k in new_keys: - if k in existing_query: - new_query[k] = tuple(to_list(existing_query[k]) + to_list(new_keys[k])) - else: - new_query[k] = new_keys[k] - + new_query = { + k: ( + tuple(to_list(existing_query[k]) + to_list(new_keys[k])) + if k in existing_query + else new_keys[k] + ) + for k in new_keys + } new_query.update( {k: existing_query[k] for k in set(existing_query).difference(new_keys)} ) @@ -283,7 +280,7 @@ def update_query_dict( def render_as_string(self, hide_password: bool = True) -> str: """Render this `URL` object as a string.""" - s = self.drivername + "://" + s = f"{self.drivername}://" if self.username is not None: s += quote(self.username, safe=" +") if self.password is not None: diff --git a/dlt/destinations/impl/clickhouse/configuration.py b/dlt/destinations/impl/clickhouse/configuration.py index 0689d70be5..1be23d757f 100644 --- a/dlt/destinations/impl/clickhouse/configuration.py +++ b/dlt/destinations/impl/clickhouse/configuration.py @@ -1,4 +1,3 @@ -import logging from typing import ClassVar, List, Any, Final, TYPE_CHECKING, Literal, cast from dlt.common.configuration import configspec @@ -18,7 +17,7 @@ class ClickhouseCredentials(ConnectionStringCredentials): drivername: str = "clickhouse" host: str """Host with running ClickHouse server.""" - port: int = 9000 + port: int = 9440 """Port ClickHouse server is bound to. Defaults to 9000.""" username: str = "default" """Database user. 
Defaults to 'default'.""" @@ -41,7 +40,6 @@ class ClickhouseCredentials(ConnectionStringCredentials): "send_receive_timeout", ] - def parse_native_representation(self, native_value: Any) -> None: super().parse_native_representation(native_value) self.connect_timeout = int(self.query.get("connect_timeout", self.connect_timeout)) @@ -54,14 +52,13 @@ def parse_native_representation(self, native_value: Any) -> None: def to_url(self) -> URL: url = super().to_url() - url.update_query_pairs( + url = url.update_query_pairs( [ ("connect_timeout", str(self.connect_timeout)), ("send_receive_timeout", str(self.send_receive_timeout)), ("secure", str(1) if self.secure else str(0)), ] ) - logging.info(url) return url From 69a672cd01c785cc2edf21c38346b09ab9349f2a Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Tue, 26 Mar 2024 22:21:51 +0200 Subject: [PATCH 034/127] Reuse Athena destination pyformat converter #1055 Signed-off-by: Marcel Coetzee --- .../impl/clickhouse/sql_client.py | 47 +++++++++--------- dlt/destinations/utils.py | 49 +++++++++++++++---- 2 files changed, 63 insertions(+), 33 deletions(-) diff --git a/dlt/destinations/impl/clickhouse/sql_client.py b/dlt/destinations/impl/clickhouse/sql_client.py index 82c3abb776..26586642f2 100644 --- a/dlt/destinations/impl/clickhouse/sql_client.py +++ b/dlt/destinations/impl/clickhouse/sql_client.py @@ -10,6 +10,7 @@ import clickhouse_driver # type: ignore[import-untyped] import clickhouse_driver.errors # type: ignore[import-untyped] +from clickhouse_driver.dbapi import OperationalError # type: ignore[import-untyped] from clickhouse_driver.dbapi.extras import DictCursor # type: ignore[import-untyped] from dlt.common.destination import DestinationCapabilitiesContext @@ -28,6 +29,7 @@ raise_open_connection_error, ) from dlt.destinations.typing import DBTransaction, DBApi +from dlt.destinations.utils import _convert_to_old_pyformat TRANSACTIONS_UNSUPPORTED_WARNING_MESSAGE = ( @@ -106,16 +108,16 @@ def drop_dataset(self) -> None: """ SELECT name FROM system.tables - WHERE database = %(db_name)s - AND name LIKE %(dataset_name)s + WHERE database = %s + AND name LIKE %s """, - {"db_name": self.database_name, "dataset_name": self.dataset_name}, + (self.database_name, f"{self.dataset_name}%"), ) for to_drop_result in to_drop_results: table = to_drop_result[0] self.execute_sql( - "DROP TABLE %(database)s.%(table)s SYNC", - {"database": self.database_name, "table": table}, + "DROP TABLE %s.%s SYNC", + (self.database_name, table), ) @contextmanager @@ -123,15 +125,23 @@ def drop_dataset(self) -> None: def execute_query( self, query: AnyStr, *args: Any, **kwargs: Any ) -> Iterator[ClickhouseDBApiCursorImpl]: - cur: clickhouse_driver.dbapi.connection.Cursor - with self._conn.cursor() as cur: - try: - cur.execute(query, args or (kwargs or None)) - yield ClickhouseDBApiCursorImpl(cur) # type: ignore[abstract] - except clickhouse_driver.dbapi.Error: - self.close_connection() - self.open_connection() - raise + assert isinstance(query, str), "Query must be a string" + + db_args = kwargs.copy() + + if args: + query, db_args = _convert_to_old_pyformat(query, args, OperationalError) + db_args.update(kwargs) + + with self._conn.cursor() as cursor: + for query_line in query.split(";"): + if query_line := query_line.strip(): + try: + cursor.execute(query_line, db_args) + except KeyError as e: + raise DatabaseTransientException(OperationalError()) from e + + yield ClickhouseDBApiCursorImpl(cursor) # type: ignore[abstract] def fully_qualified_dataset_name(self, escape: bool = 
True) -> str: database_name = ( @@ -189,15 +199,6 @@ def _make_database_exception(cls, ex: Exception) -> Exception: # type: ignore[r else: return ex - def has_dataset(self) -> bool: - query = """ - SELECT 1 FROM INFORMATION_SCHEMA.SCHEMATA WHERE - catalog_name = %(database)s AND schema_name = %(table)s - """ - database, table = self.fully_qualified_dataset_name(escape=False).split(".", 2) - rows = self.execute_sql(query, {"database": database, "table": table}) - return len(rows) > 0 - @staticmethod def _maybe_make_terminal_exception_from_data_error() -> Optional[Exception]: return None diff --git a/dlt/destinations/utils.py b/dlt/destinations/utils.py index d4b945a840..500eec2ceb 100644 --- a/dlt/destinations/utils.py +++ b/dlt/destinations/utils.py @@ -1,16 +1,45 @@ -from typing import Any +import re +from typing import Any, cast, Tuple, Dict, Type +from dlt.destinations.exceptions import DatabaseTransientException from dlt.extract import DltResource, resource as make_resource def ensure_resource(data: Any) -> DltResource: """Wraps `data` in a DltResource if it's not a DltResource already.""" - resource: DltResource - if not isinstance(data, DltResource): - resource_name: str = None - if not hasattr(data, "__name__"): - resource_name = "content" - resource = make_resource(data, name=resource_name) - else: - resource = data - return resource + if isinstance(data, DltResource): + return data + resource_name = None if hasattr(data, "__name__") else "content" + return cast(DltResource, make_resource(data, name=resource_name)) + + +def _convert_to_old_pyformat( + new_style_string: str, args: Tuple[Any, ...], operational_error_cls: Type[Exception] +) -> Tuple[str, Dict[str, Any]]: + """Converts a query string from the new pyformat style to the old pyformat style. + + The new pyformat style uses placeholders like %s, while the old pyformat style + uses placeholders like %(arg0)s, where the number corresponds to the index of + the argument in the args tuple. + + Args: + new_style_string (str): The query string in the new pyformat style. + args (Tuple[Any, ...]): The arguments to be inserted into the query string. + operational_error_cls (Type[Exception]): The specific OperationalError class to be raised + in case of a mismatch between placeholders and arguments. This should be the + OperationalError class provided by the DBAPI2-compliant driver being used. + + Returns: + Tuple[str, Dict[str, Any]]: A tuple containing the converted query string + in the old pyformat style, and a dictionary mapping argument keys to values. + + Raises: + DatabaseTransientException: If there is a mismatch between the number of + placeholders in the query string, and the number of arguments provided. 
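+
+    Example (illustrative):
+        Called as _convert_to_old_pyformat("SELECT x FROM t WHERE a = %s AND b = %s", (10, "x"), err_cls),
+        where err_cls is the driver's OperationalError class, this is expected to return
+        ("SELECT x FROM t WHERE a = %(arg0)s AND b = %(arg1)s", {"arg0": 10, "arg1": "x"}).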
+ """ + keys = [f"arg{str(i)}" for i, _ in enumerate(args)] + old_style_string, count = re.subn(r"%s", lambda _: f"%({keys.pop(0)})s", new_style_string) + mapping = dict(zip([f"arg{str(i)}" for i, _ in enumerate(args)], args)) + if count != len(args): + raise DatabaseTransientException(operational_error_cls()) + return old_style_string, mapping From 0cbb6577d42cc580c6e4c70858528b7401ea7dc2 Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Tue, 26 Mar 2024 23:21:12 +0200 Subject: [PATCH 035/127] Filesystem Auth issues #1055 Signed-off-by: Marcel Coetzee --- dlt/destinations/impl/clickhouse/__init__.py | 10 ++++++++-- dlt/destinations/impl/clickhouse/clickhouse.py | 2 +- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/dlt/destinations/impl/clickhouse/__init__.py b/dlt/destinations/impl/clickhouse/__init__.py index f505378152..d4ec83b6f5 100644 --- a/dlt/destinations/impl/clickhouse/__init__.py +++ b/dlt/destinations/impl/clickhouse/__init__.py @@ -1,3 +1,5 @@ +import sys + from dlt.common.arithmetics import DEFAULT_NUMERIC_PRECISION, DEFAULT_NUMERIC_SCALE from dlt.common.data_writers.escape import escape_clickhouse_identifier, escape_clickhouse_literal from dlt.common.destination import DestinationCapabilitiesContext @@ -14,8 +16,12 @@ def capabilities() -> DestinationCapabilitiesContext: caps.escape_identifier = escape_clickhouse_identifier caps.escape_literal = escape_clickhouse_literal - caps.max_identifier_length = 65536 - caps.max_column_identifier_length = 65536 + # https://stackoverflow.com/questions/68358686/what-is-the-maximum-length-of-a-column-in-clickhouse-can-it-be-modified + caps.max_identifier_length = 255 + caps.max_column_identifier_length = 255 + + # Clickhouse has no max `String` type length. + caps.max_text_data_type_length = sys.maxsize caps.schema_supports_numeric_precision = True # Use 'Decimal128' with these defaults. diff --git a/dlt/destinations/impl/clickhouse/clickhouse.py b/dlt/destinations/impl/clickhouse/clickhouse.py index dcbbd4987e..8fb9bb5103 100644 --- a/dlt/destinations/impl/clickhouse/clickhouse.py +++ b/dlt/destinations/impl/clickhouse/clickhouse.py @@ -120,7 +120,7 @@ def __init__( file_name = ( FileStorage.get_file_name_from_file_path(bucket_path) if bucket_path else file_name ) - file_extension = os.path.splitext(file_name)[1].lower() + file_extension = os.path.splitext(file_name)[1][1:].lower() # Remove dot (.) from file extension. 
if file_extension not in ["parquet", "jsonl"]: raise ValueError("Clickhouse staging only supports 'parquet' and 'jsonl' file formats.") From bccab86ba369cdbd4dd2403a02de54896e60aa59 Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Wed, 27 Mar 2024 17:17:22 +0200 Subject: [PATCH 036/127] Fix incorrect arguments in render_object_storage_table_function #1055 Signed-off-by: Marcel Coetzee --- dlt/destinations/impl/clickhouse/clickhouse.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dlt/destinations/impl/clickhouse/clickhouse.py b/dlt/destinations/impl/clickhouse/clickhouse.py index 8fb9bb5103..e250ee8256 100644 --- a/dlt/destinations/impl/clickhouse/clickhouse.py +++ b/dlt/destinations/impl/clickhouse/clickhouse.py @@ -140,7 +140,7 @@ def __init__( table_function = ( render_object_storage_table_function( bucket_http_url, - staging_credentials.aws_secret_access_key, + staging_credentials.aws_access_key_id, staging_credentials.aws_secret_access_key, file_format=file_extension, ) From c56f1a4927b557a262ecb76bd76cf7d9b72bd339 Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Sat, 30 Mar 2024 23:31:41 +0200 Subject: [PATCH 037/127] Pass all providers append tests #1055 --- dlt/destinations/impl/clickhouse/__init__.py | 8 +- .../impl/clickhouse/clickhouse.py | 82 ++++++++++++++----- .../test_clickhouse_configuration.py | 17 ++++ .../test_clickhouse_table_builder.py | 2 +- tests/load/pipeline/test_clickhouse.py | 70 +++++++++++++++- tests/load/utils.py | 37 ++++----- 6 files changed, 167 insertions(+), 49 deletions(-) diff --git a/dlt/destinations/impl/clickhouse/__init__.py b/dlt/destinations/impl/clickhouse/__init__.py index d4ec83b6f5..445a2f12a9 100644 --- a/dlt/destinations/impl/clickhouse/__init__.py +++ b/dlt/destinations/impl/clickhouse/__init__.py @@ -8,10 +8,10 @@ def capabilities() -> DestinationCapabilitiesContext: caps = DestinationCapabilitiesContext() - caps.preferred_loader_file_format = "jsonl" - caps.supported_loader_file_formats = ["jsonl", "parquet"] - caps.preferred_staging_file_format = "jsonl" - caps.supported_staging_file_formats = ["jsonl", "parquet"] + caps.preferred_loader_file_format = "parquet" + caps.supported_loader_file_formats = ["parquet"] + caps.preferred_staging_file_format = "parquet" + caps.supported_staging_file_formats = ["parquet"] caps.escape_identifier = escape_clickhouse_identifier caps.escape_literal = escape_clickhouse_literal diff --git a/dlt/destinations/impl/clickhouse/clickhouse.py b/dlt/destinations/impl/clickhouse/clickhouse.py index e250ee8256..c75a14ab34 100644 --- a/dlt/destinations/impl/clickhouse/clickhouse.py +++ b/dlt/destinations/impl/clickhouse/clickhouse.py @@ -1,12 +1,16 @@ import os from copy import deepcopy -from typing import ClassVar, Optional, Dict, List, Sequence, cast +from typing import ClassVar, Optional, Dict, List, Sequence, cast, Tuple from urllib.parse import urlparse +import dlt from dlt.common.configuration.specs import ( CredentialsConfiguration, - AwsCredentialsWithoutDefaults, AzureCredentialsWithoutDefaults, + AwsCredentials, + GcpCredentials, + AzureCredentials, + AwsCredentialsWithoutDefaults, ) from dlt.common.destination import DestinationCapabilitiesContext from dlt.common.destination.reference import ( @@ -16,7 +20,14 @@ LoadJob, ) from dlt.common.schema import Schema, TColumnSchema -from dlt.common.schema.typing import TTableFormat, TTableSchema, TColumnHint, TColumnType +from dlt.common.schema.typing import ( + TTableFormat, + TTableSchema, + TColumnHint, + TColumnType, + 
TTableSchemaColumns, + TColumnSchemaBase, +) from dlt.common.storages import FileStorage from dlt.destinations.impl.clickhouse import capabilities from dlt.destinations.impl.clickhouse.clickhouse_adapter import ( @@ -120,7 +131,9 @@ def __init__( file_name = ( FileStorage.get_file_name_from_file_path(bucket_path) if bucket_path else file_name ) - file_extension = os.path.splitext(file_name)[1][1:].lower() # Remove dot (.) from file extension. + file_extension = os.path.splitext(file_name)[1][ + 1: + ].lower() # Remove dot (.) from file extension. if file_extension not in ["parquet", "jsonl"]: raise ValueError("Clickhouse staging only supports 'parquet' and 'jsonl' file formats.") @@ -137,26 +150,29 @@ def __init__( if bucket_scheme in ("s3", "gs", "gcs"): bucket_http_url = convert_storage_to_http_scheme(bucket_url) - table_function = ( - render_object_storage_table_function( - bucket_http_url, - staging_credentials.aws_access_key_id, - staging_credentials.aws_secret_access_key, - file_format=file_extension, - ) - if isinstance(staging_credentials, AwsCredentialsWithoutDefaults) - else render_object_storage_table_function( - bucket_http_url, file_format=file_extension - ) + if isinstance(staging_credentials, AwsCredentialsWithoutDefaults): + access_key_id = staging_credentials.aws_access_key_id + secret_access_key = staging_credentials.aws_secret_access_key + elif isinstance(staging_credentials, GcpCredentials): + # TODO: HMAC keys aren't implemented in `GcpCredentials`. + access_key_id = dlt.config["destination.filesystem.credentials.gcp_access_key_id"] + secret_access_key = dlt.config[ + "destination.filesystem.credentials.gcp_secret_access_key" + ] + else: + access_key_id = None + secret_access_key = None + + table_function = render_object_storage_table_function( + bucket_http_url, access_key_id, secret_access_key, file_format=file_extension ) + elif bucket_scheme in ("az", "abfs"): if isinstance(staging_credentials, AzureCredentialsWithoutDefaults): # Authenticated access. account_name = staging_credentials.azure_storage_account_name - storage_account_url = ( - f"{staging_credentials.azure_storage_account_name}.blob.core.windows.net" - ) - account_key = staging_credentials.azure_storage_sas_token + storage_account_url = f"https://{staging_credentials.azure_storage_account_name}.blob.core.windows.net" + account_key = staging_credentials.azure_storage_account_key container_name = bucket_url.netloc blobpath = bucket_url.path @@ -261,6 +277,34 @@ def _get_column_def_sql(self, c: TColumnSchema, table_format: TTableFormat = Non .strip() ) + def get_storage_table(self, table_name: str) -> Tuple[bool, TTableSchemaColumns]: + fields = self._get_storage_table_query_columns() + db_params = self.sql_client.make_qualified_table_name(table_name, escape=False).split( + ".", 3 + ) + query = f'SELECT {",".join(fields)} FROM INFORMATION_SCHEMA.COLUMNS WHERE ' + if len(db_params) == 3: + query += "table_catalog = %s AND " + query += "table_schema = %s AND table_name = %s ORDER BY ordinal_position;" + rows = self.sql_client.execute_sql(query, *db_params) + + # If no rows we assume that table does not exist. 
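+        # Each returned row is expected to contain
+        # (column_name, data_type, is_nullable, numeric_precision, numeric_scale), in the order indexed below.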
+ schema_table: TTableSchemaColumns = {} + if len(rows) == 0: + return False, schema_table + for c in rows: + numeric_precision = ( + c[3] if self.capabilities.schema_supports_numeric_precision else None + ) + numeric_scale = c[4] if self.capabilities.schema_supports_numeric_precision else None + schema_c: TColumnSchemaBase = { + "name": c[0], + "nullable": bool(c[2]), + **self._from_db_type(c[1], numeric_precision, numeric_scale), + } + schema_table[c[0]] = schema_c # type: ignore + return True, schema_table + # Clickhouse fields are not nullable by default. @staticmethod def _gen_not_null(v: bool) -> str: diff --git a/tests/load/clickhouse/test_clickhouse_configuration.py b/tests/load/clickhouse/test_clickhouse_configuration.py index b0710224d9..9beb847f85 100644 --- a/tests/load/clickhouse/test_clickhouse_configuration.py +++ b/tests/load/clickhouse/test_clickhouse_configuration.py @@ -1,3 +1,8 @@ +from typing import Any + +import pytest + +import dlt from dlt.common.configuration.resolve import resolve_configuration from dlt.common.libs.sql_alchemy import make_url from dlt.common.utils import digest128 @@ -9,6 +14,7 @@ SnowflakeClientConfiguration, SnowflakeCredentials, ) +from tests.common.configuration.utils import environment def test_connection_string_with_all_params() -> None: @@ -41,3 +47,14 @@ def test_clickhouse_configuration() -> None: explicit_value="clickhouse://user1:pass1@host1:9000/db1", ) assert SnowflakeClientConfiguration(credentials=c).fingerprint() == digest128("host1") + + +@pytest.mark.usefixtures("environment") +def test_gcp_hmac_getter_accessor(environment: Any) -> None: + environment["DESTINATION__FILESYSTEM__CREDENTIALS__GCP_ACCESS_KEY_ID"] = "25g08jaDJacj42" + environment["DESTINATION__FILESYSTEM__CREDENTIALS__GCP_SECRET_ACCESS_KEY"] = "ascvntp45uasdf" + + assert dlt.config["destination.filesystem.credentials.gcp_access_key_id"] == "25g08jaDJacj42" + assert ( + dlt.config["destination.filesystem.credentials.gcp_secret_access_key"] == "ascvntp45uasdf" + ) diff --git a/tests/load/clickhouse/test_clickhouse_table_builder.py b/tests/load/clickhouse/test_clickhouse_table_builder.py index 92d617e7c2..dad682a108 100644 --- a/tests/load/clickhouse/test_clickhouse_table_builder.py +++ b/tests/load/clickhouse/test_clickhouse_table_builder.py @@ -167,6 +167,6 @@ def test_create_table_with_hints(client: ClickhouseClient) -> None: assert "`col1` bigint SORTKEY NOT NULL" in sql assert "`col2` double precision DISTKEY NOT NULL" in sql assert "`col5` varchar(max) DISTKEY" in sql - # no hints + # No hints. 
assert "`col3` boolean NOT NULL" in sql assert "`col4` timestamp with time zone NOT NULL" in sql diff --git a/tests/load/pipeline/test_clickhouse.py b/tests/load/pipeline/test_clickhouse.py index e48f45be13..b930e1a7d5 100644 --- a/tests/load/pipeline/test_clickhouse.py +++ b/tests/load/pipeline/test_clickhouse.py @@ -1,9 +1,15 @@ +from typing import Iterator + import pytest import dlt +from dlt.common.typing import TDataItem from dlt.common.utils import uniq_id -from tests.load.pipeline.utils import destinations_configs, DestinationTestConfiguration -from tests.load.pipeline.utils import load_table_counts +from tests.load.pipeline.utils import ( + destinations_configs, + DestinationTestConfiguration, + load_table_counts, +) @pytest.mark.parametrize( @@ -11,7 +17,7 @@ destinations_configs(all_staging_configs=True, subset=["clickhouse"]), ids=lambda x: x.name, ) -def test_clickhouse_destinations(destination_config: DestinationTestConfiguration) -> None: +def test_clickhouse_destinations_append(destination_config: DestinationTestConfiguration) -> None: pipeline = destination_config.setup_pipeline(f"clickhouse_{uniq_id()}", full_refresh=True) @dlt.resource(name="items", write_disposition="append") @@ -61,3 +67,61 @@ def items2(): assert table_counts["items"] == 2 assert table_counts["items__sub_items"] == 4 assert table_counts["_dlt_loads"] == 2 + + +@pytest.mark.skip() +@pytest.mark.parametrize( + "destination_config", + destinations_configs(all_staging_configs=True, subset=["clickhouse"]), + ids=lambda x: x.name, +) +def test_clickhouse_destinations_merge(destination_config: DestinationTestConfiguration) -> None: + pipeline = destination_config.setup_pipeline(f"clickhouse_{uniq_id()}", full_refresh=True) + + @dlt.resource(name="items", write_disposition="append") + def items() -> Iterator[TDataItem]: + yield { + "id": 1, + "name": "item", + "sub_items": [{"id": 101, "name": "sub item 101"}, {"id": 101, "name": "sub item 102"}], + } + + pipeline.run( + items, loader_file_format=destination_config.file_format, staging=destination_config.staging + ) + + table_counts = load_table_counts( + pipeline, *[t["name"] for t in pipeline.default_schema._schema_tables.values()] + ) + assert table_counts["items"] == 1 + assert table_counts["items__sub_items"] == 2 + assert table_counts["_dlt_loads"] == 1 + + # Load again with schema evolution. 
+ @dlt.resource(name="items", write_disposition="merge") + def items2(): + yield { + "id": 1, + "name": "item", + "new_field": "hello", + "sub_items": [ + { + "id": 101, + "name": "sub item 101", + "other_new_field": "hello 101", + }, + { + "id": 101, + "name": "sub item 102", + "other_new_field": "hello 102", + }, + ], + } + + pipeline.run(items2) + table_counts = load_table_counts( + pipeline, *[t["name"] for t in pipeline.default_schema._schema_tables.values()] + ) + assert table_counts["items"] == 2 + assert table_counts["items__sub_items"] == 4 + assert table_counts["_dlt_loads"] == 2 diff --git a/tests/load/utils.py b/tests/load/utils.py index 18d4234bdd..bd655761e7 100644 --- a/tests/load/utils.py +++ b/tests/load/utils.py @@ -299,27 +299,6 @@ def destinations_configs( bucket_url=AZ_BUCKET, extra_info="az-authorization", ), - DestinationTestConfiguration( - destination="clickhouse", - staging="filesystem", - file_format="jsonl", - bucket_url=GCS_BUCKET, - extra_info="gcs-authorization", - ), - DestinationTestConfiguration( - destination="clickhouse", - staging="filesystem", - file_format="jsonl", - bucket_url=AWS_BUCKET, - extra_info="s3-authorization", - ), - DestinationTestConfiguration( - destination="clickhouse", - staging="filesystem", - file_format="jsonl", - bucket_url=AZ_BUCKET, - extra_info="az-authorization", - ), DestinationTestConfiguration( destination="clickhouse", staging="filesystem", @@ -369,10 +348,24 @@ def destinations_configs( DestinationTestConfiguration( destination="clickhouse", staging="filesystem", - file_format="jsonl", + file_format="parquet", bucket_url=AWS_BUCKET, extra_info="credential-forwarding", ), + DestinationTestConfiguration( + destination="clickhouse", + staging="filesystem", + file_format="parquet", + bucket_url=GCS_BUCKET, + extra_info="credential-forwarding", + ), + DestinationTestConfiguration( + destination="clickhouse", + staging="filesystem", + file_format="parquet", + bucket_url=AZ_BUCKET, + extra_info="credential-forwarding", + ), DestinationTestConfiguration( destination="bigquery", staging="filesystem", From 74e5a6dd643fc2aed3d62195e24ce9993ea68298 Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Sun, 31 Mar 2024 00:08:28 +0200 Subject: [PATCH 038/127] Add merge test #1055 --- tests/load/pipeline/test_clickhouse.py | 13 ++++++------- tests/load/utils.py | 21 --------------------- 2 files changed, 6 insertions(+), 28 deletions(-) diff --git a/tests/load/pipeline/test_clickhouse.py b/tests/load/pipeline/test_clickhouse.py index b930e1a7d5..5dea3252ed 100644 --- a/tests/load/pipeline/test_clickhouse.py +++ b/tests/load/pipeline/test_clickhouse.py @@ -21,7 +21,7 @@ def test_clickhouse_destinations_append(destination_config: DestinationTestConfi pipeline = destination_config.setup_pipeline(f"clickhouse_{uniq_id()}", full_refresh=True) @dlt.resource(name="items", write_disposition="append") - def items(): + def items() -> Iterator[TDataItem]: yield { "id": 1, "name": "item", @@ -41,7 +41,7 @@ def items(): # Load again with schema evolution. 
@dlt.resource(name="items", write_disposition="append") - def items2(): + def items2() -> Iterator[TDataItem]: yield { "id": 1, "name": "item", @@ -69,7 +69,6 @@ def items2(): assert table_counts["_dlt_loads"] == 2 -@pytest.mark.skip() @pytest.mark.parametrize( "destination_config", destinations_configs(all_staging_configs=True, subset=["clickhouse"]), @@ -78,7 +77,7 @@ def items2(): def test_clickhouse_destinations_merge(destination_config: DestinationTestConfiguration) -> None: pipeline = destination_config.setup_pipeline(f"clickhouse_{uniq_id()}", full_refresh=True) - @dlt.resource(name="items", write_disposition="append") + @dlt.resource(name="items", write_disposition="merge") def items() -> Iterator[TDataItem]: yield { "id": 1, @@ -99,7 +98,7 @@ def items() -> Iterator[TDataItem]: # Load again with schema evolution. @dlt.resource(name="items", write_disposition="merge") - def items2(): + def items2() -> Iterator[TDataItem]: yield { "id": 1, "name": "item", @@ -122,6 +121,6 @@ def items2(): table_counts = load_table_counts( pipeline, *[t["name"] for t in pipeline.default_schema._schema_tables.values()] ) - assert table_counts["items"] == 2 - assert table_counts["items__sub_items"] == 4 + assert table_counts["items"] == 1 + assert table_counts["items__sub_items"] == 2 assert table_counts["_dlt_loads"] == 2 diff --git a/tests/load/utils.py b/tests/load/utils.py index bd655761e7..f5e0052770 100644 --- a/tests/load/utils.py +++ b/tests/load/utils.py @@ -345,27 +345,6 @@ def destinations_configs( bucket_url=AWS_BUCKET, extra_info="credential-forwarding", ), - DestinationTestConfiguration( - destination="clickhouse", - staging="filesystem", - file_format="parquet", - bucket_url=AWS_BUCKET, - extra_info="credential-forwarding", - ), - DestinationTestConfiguration( - destination="clickhouse", - staging="filesystem", - file_format="parquet", - bucket_url=GCS_BUCKET, - extra_info="credential-forwarding", - ), - DestinationTestConfiguration( - destination="clickhouse", - staging="filesystem", - file_format="parquet", - bucket_url=AZ_BUCKET, - extra_info="credential-forwarding", - ), DestinationTestConfiguration( destination="bigquery", staging="filesystem", From ff4e214a5eee11372f91e458401f27310482412b Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Tue, 2 Apr 2024 01:08:16 +0200 Subject: [PATCH 039/127] Resolved driver parameter substitution issues #1055 Signed-off-by: Marcel Coetzee --- .../impl/clickhouse/clickhouse.py | 13 +- .../impl/clickhouse/sql_client.py | 17 +- dlt/destinations/utils.py | 5 + tests/load/pipeline/test_clickhouse.py | 220 ++++++++++-------- 4 files changed, 149 insertions(+), 106 deletions(-) diff --git a/dlt/destinations/impl/clickhouse/clickhouse.py b/dlt/destinations/impl/clickhouse/clickhouse.py index c75a14ab34..d42c7ee2dd 100644 --- a/dlt/destinations/impl/clickhouse/clickhouse.py +++ b/dlt/destinations/impl/clickhouse/clickhouse.py @@ -7,9 +7,7 @@ from dlt.common.configuration.specs import ( CredentialsConfiguration, AzureCredentialsWithoutDefaults, - AwsCredentials, GcpCredentials, - AzureCredentials, AwsCredentialsWithoutDefaults, ) from dlt.common.destination import DestinationCapabilitiesContext @@ -18,6 +16,7 @@ TLoadJobState, FollowupJob, LoadJob, + NewLoadJob, ) from dlt.common.schema import Schema, TColumnSchema from dlt.common.schema.typing import ( @@ -49,6 +48,7 @@ SqlJobClientBase, ) from dlt.destinations.job_impl import NewReferenceJob, EmptyLoadJob +from dlt.destinations.sql_jobs import SqlMergeJob from dlt.destinations.type_mapping import 
TypeMapper @@ -200,6 +200,12 @@ def exception(self) -> str: raise NotImplementedError() +class ClickhouseMergeJob(SqlMergeJob): + @classmethod + def _to_temp_table(cls, select_sql: str, temp_table_name: str) -> str: + return f"CREATE TEMPORARY TABLE {temp_table_name} AS {select_sql};" + + class ClickhouseClient(SqlJobClientWithStaging, SupportsStagingDestination): capabilities: ClassVar[DestinationCapabilitiesContext] = capabilities() @@ -318,3 +324,6 @@ def _from_db_type( def restore_file_load(self, file_path: str) -> LoadJob: return EmptyLoadJob.from_file_path(file_path, "completed") + + def _create_merge_followup_jobs(self, table_chain: Sequence[TTableSchema]) -> List[NewLoadJob]: + return [ClickhouseMergeJob.from_table_chain(table_chain, self.sql_client)] diff --git a/dlt/destinations/impl/clickhouse/sql_client.py b/dlt/destinations/impl/clickhouse/sql_client.py index 26586642f2..6641f7b752 100644 --- a/dlt/destinations/impl/clickhouse/sql_client.py +++ b/dlt/destinations/impl/clickhouse/sql_client.py @@ -14,7 +14,6 @@ from clickhouse_driver.dbapi.extras import DictCursor # type: ignore[import-untyped] from dlt.common.destination import DestinationCapabilitiesContext -from dlt.common.runtime import logger from dlt.destinations.exceptions import ( DatabaseUndefinedRelation, DatabaseTransientException, @@ -75,16 +74,13 @@ def close_connection(self) -> None: @raise_database_error def begin_transaction(self) -> Iterator[DBTransaction]: yield self - logger.warning(TRANSACTIONS_UNSUPPORTED_WARNING_MESSAGE) @raise_database_error def commit_transaction(self) -> None: - logger.warning(TRANSACTIONS_UNSUPPORTED_WARNING_MESSAGE) self._conn.commit() @raise_database_error def rollback_transaction(self) -> None: - logger.warning(TRANSACTIONS_UNSUPPORTED_WARNING_MESSAGE) self._conn.rollback() @property @@ -103,7 +99,7 @@ def create_dataset(self) -> None: def drop_dataset(self) -> None: # Since Clickhouse doesn't have schemas, we need to drop all tables in our virtual schema, - # or collection of tables that has the `dataset_name` as a prefix. + # or collection of tables, that has the `dataset_name` as a prefix. to_drop_results = self.execute_sql( """ SELECT name @@ -111,13 +107,18 @@ def drop_dataset(self) -> None: WHERE database = %s AND name LIKE %s """, - (self.database_name, f"{self.dataset_name}%"), + ( + self.database_name, + f"{self.dataset_name}%", + ), ) for to_drop_result in to_drop_results: table = to_drop_result[0] + # The "DROP TABLE" clause is discarded if we allow clickhouse_driver to handle parameter substitution. + # This is because the driver incorrectly substitutes the entire query string, causing the "DROP TABLE" keyword to be omitted. + # To resolve this, we are forced to provide the full query string here. self.execute_sql( - "DROP TABLE %s.%s SYNC", - (self.database_name, table), + f"""DROP TABLE {self.capabilities.escape_identifier(self.database_name)}.{self.capabilities.escape_identifier(table)} SYNC""" ) @contextmanager diff --git a/dlt/destinations/utils.py b/dlt/destinations/utils.py index 500eec2ceb..a2ffa490aa 100644 --- a/dlt/destinations/utils.py +++ b/dlt/destinations/utils.py @@ -37,9 +37,14 @@ def _convert_to_old_pyformat( DatabaseTransientException: If there is a mismatch between the number of placeholders in the query string, and the number of arguments provided. 
""" + if len(args) == 1 and isinstance(args[0], tuple): + args = args[0] + keys = [f"arg{str(i)}" for i, _ in enumerate(args)] old_style_string, count = re.subn(r"%s", lambda _: f"%({keys.pop(0)})s", new_style_string) mapping = dict(zip([f"arg{str(i)}" for i, _ in enumerate(args)], args)) if count != len(args): raise DatabaseTransientException(operational_error_cls()) return old_style_string, mapping + + diff --git a/tests/load/pipeline/test_clickhouse.py b/tests/load/pipeline/test_clickhouse.py index 5dea3252ed..61d9065af4 100644 --- a/tests/load/pipeline/test_clickhouse.py +++ b/tests/load/pipeline/test_clickhouse.py @@ -17,56 +17,70 @@ destinations_configs(all_staging_configs=True, subset=["clickhouse"]), ids=lambda x: x.name, ) -def test_clickhouse_destinations_append(destination_config: DestinationTestConfiguration) -> None: +def test_clickhouse_destination_append(destination_config: DestinationTestConfiguration) -> None: pipeline = destination_config.setup_pipeline(f"clickhouse_{uniq_id()}", full_refresh=True) - @dlt.resource(name="items", write_disposition="append") - def items() -> Iterator[TDataItem]: - yield { - "id": 1, - "name": "item", - "sub_items": [{"id": 101, "name": "sub item 101"}, {"id": 101, "name": "sub item 102"}], - } - - pipeline.run( - items, loader_file_format=destination_config.file_format, staging=destination_config.staging - ) - - table_counts = load_table_counts( - pipeline, *[t["name"] for t in pipeline.default_schema._schema_tables.values()] - ) - assert table_counts["items"] == 1 - assert table_counts["items__sub_items"] == 2 - assert table_counts["_dlt_loads"] == 1 - - # Load again with schema evolution. - @dlt.resource(name="items", write_disposition="append") - def items2() -> Iterator[TDataItem]: - yield { - "id": 1, - "name": "item", - "new_field": "hello", - "sub_items": [ - { - "id": 101, - "name": "sub item 101", - "other_new_field": "hello 101", - }, - { - "id": 101, - "name": "sub item 102", - "other_new_field": "hello 102", - }, - ], - } - - pipeline.run(items2) - table_counts = load_table_counts( - pipeline, *[t["name"] for t in pipeline.default_schema._schema_tables.values()] - ) - assert table_counts["items"] == 2 - assert table_counts["items__sub_items"] == 4 - assert table_counts["_dlt_loads"] == 2 + try: + + @dlt.resource(name="items", write_disposition="append") + def items() -> Iterator[TDataItem]: + yield { + "id": 1, + "name": "item", + "sub_items": [ + {"id": 101, "name": "sub item 101"}, + {"id": 101, "name": "sub item 102"}, + ], + } + + pipeline.run( + items, + loader_file_format=destination_config.file_format, + staging=destination_config.staging, + ) + + table_counts = load_table_counts( + pipeline, *[t["name"] for t in pipeline.default_schema._schema_tables.values()] + ) + assert table_counts["items"] == 1 + assert table_counts["items__sub_items"] == 2 + assert table_counts["_dlt_loads"] == 1 + + # Load again with schema evolution. 
+ @dlt.resource(name="items", write_disposition="append") + def items2() -> Iterator[TDataItem]: + yield { + "id": 1, + "name": "item", + "new_field": "hello", + "sub_items": [ + { + "id": 101, + "name": "sub item 101", + "other_new_field": "hello 101", + }, + { + "id": 101, + "name": "sub item 102", + "other_new_field": "hello 102", + }, + ], + } + + pipeline.run(items2) + table_counts = load_table_counts( + pipeline, *[t["name"] for t in pipeline.default_schema._schema_tables.values()] + ) + assert table_counts["items"] == 2 + assert table_counts["items__sub_items"] == 4 + assert table_counts["_dlt_loads"] == 2 + + except Exception as e: + raise e + + finally: + with pipeline.sql_client() as client: + client.drop_dataset() @pytest.mark.parametrize( @@ -74,53 +88,67 @@ def items2() -> Iterator[TDataItem]: destinations_configs(all_staging_configs=True, subset=["clickhouse"]), ids=lambda x: x.name, ) -def test_clickhouse_destinations_merge(destination_config: DestinationTestConfiguration) -> None: +def test_clickhouse_destination_merge(destination_config: DestinationTestConfiguration) -> None: pipeline = destination_config.setup_pipeline(f"clickhouse_{uniq_id()}", full_refresh=True) - @dlt.resource(name="items", write_disposition="merge") - def items() -> Iterator[TDataItem]: - yield { - "id": 1, - "name": "item", - "sub_items": [{"id": 101, "name": "sub item 101"}, {"id": 101, "name": "sub item 102"}], - } - - pipeline.run( - items, loader_file_format=destination_config.file_format, staging=destination_config.staging - ) - - table_counts = load_table_counts( - pipeline, *[t["name"] for t in pipeline.default_schema._schema_tables.values()] - ) - assert table_counts["items"] == 1 - assert table_counts["items__sub_items"] == 2 - assert table_counts["_dlt_loads"] == 1 - - # Load again with schema evolution. - @dlt.resource(name="items", write_disposition="merge") - def items2() -> Iterator[TDataItem]: - yield { - "id": 1, - "name": "item", - "new_field": "hello", - "sub_items": [ - { - "id": 101, - "name": "sub item 101", - "other_new_field": "hello 101", - }, - { - "id": 101, - "name": "sub item 102", - "other_new_field": "hello 102", - }, - ], - } - - pipeline.run(items2) - table_counts = load_table_counts( - pipeline, *[t["name"] for t in pipeline.default_schema._schema_tables.values()] - ) - assert table_counts["items"] == 1 - assert table_counts["items__sub_items"] == 2 - assert table_counts["_dlt_loads"] == 2 + try: + + @dlt.resource(name="items", write_disposition="merge") + def items() -> Iterator[TDataItem]: + yield { + "id": 1, + "name": "item", + "sub_items": [ + {"id": 101, "name": "sub item 101"}, + {"id": 101, "name": "sub item 102"}, + ], + } + + pipeline.run( + items, + loader_file_format=destination_config.file_format, + staging=destination_config.staging, + ) + + table_counts = load_table_counts( + pipeline, *[t["name"] for t in pipeline.default_schema._schema_tables.values()] + ) + assert table_counts["items"] == 1 + assert table_counts["items__sub_items"] == 2 + assert table_counts["_dlt_loads"] == 1 + + # Load again with schema evolution. 
+ @dlt.resource(name="items", write_disposition="merge") + def items2() -> Iterator[TDataItem]: + yield { + "id": 1, + "name": "item", + "new_field": "hello", + "sub_items": [ + { + "id": 101, + "name": "sub item 101", + "other_new_field": "hello 101", + }, + { + "id": 101, + "name": "sub item 102", + "other_new_field": "hello 102", + }, + ], + } + + pipeline.run(items2) + table_counts = load_table_counts( + pipeline, *[t["name"] for t in pipeline.default_schema._schema_tables.values()] + ) + assert table_counts["items"] == 1 + assert table_counts["items__sub_items"] == 2 + assert table_counts["_dlt_loads"] == 2 + + except Exception as e: + raise e + + finally: + with pipeline.sql_client() as client: + client.drop_dataset() From 5a31ca4ca0afc4360329d6aad1890d3853b1fd5a Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Tue, 2 Apr 2024 16:24:55 +0200 Subject: [PATCH 040/127] Merge Disposition #1055 Signed-off-by: Marcel Coetzee --- .../impl/clickhouse/clickhouse.py | 161 +++++++++++++++++- tests/load/pipeline/test_clickhouse.py | 4 +- 2 files changed, 162 insertions(+), 3 deletions(-) diff --git a/dlt/destinations/impl/clickhouse/clickhouse.py b/dlt/destinations/impl/clickhouse/clickhouse.py index d42c7ee2dd..ae2f8a10e1 100644 --- a/dlt/destinations/impl/clickhouse/clickhouse.py +++ b/dlt/destinations/impl/clickhouse/clickhouse.py @@ -27,7 +27,13 @@ TTableSchemaColumns, TColumnSchemaBase, ) +from dlt.common.schema.utils import ( + get_columns_names_with_prop, + get_first_column_name_with_prop, + get_dedup_sort_tuple, +) from dlt.common.storages import FileStorage +from dlt.destinations.exceptions import MergeDispositionException from dlt.destinations.impl.clickhouse import capabilities from dlt.destinations.impl.clickhouse.clickhouse_adapter import ( TTableEngineType, @@ -203,7 +209,160 @@ def exception(self) -> str: class ClickhouseMergeJob(SqlMergeJob): @classmethod def _to_temp_table(cls, select_sql: str, temp_table_name: str) -> str: - return f"CREATE TEMPORARY TABLE {temp_table_name} AS {select_sql};" + # Different sessions are created during the load process, and temporary tables + # do not persist between sessions. + # Resorting to persisted in-memory table to fix. 
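+        # A named table with ENGINE = Memory is kept in RAM but, unlike a TEMPORARY table, remains
+        # visible to other sessions and connections, so later load steps can still read it.
+        # It must be dropped explicitly once the merge transformation is done.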
+ # return f"CREATE TABLE {temp_table_name} ENGINE = Memory AS {select_sql};" + return f"CREATE TABLE {temp_table_name} ENGINE = Memory AS {select_sql};" + + @classmethod + def gen_merge_sql( + cls, table_chain: Sequence[TTableSchema], sql_client: ClickhouseSqlClient # type: ignore[override] + ) -> List[str]: + sql: List[str] = [] + root_table = table_chain[0] + + escape_id = sql_client.capabilities.escape_identifier + escape_lit = sql_client.capabilities.escape_literal + if escape_id is None: + escape_id = DestinationCapabilitiesContext.generic_capabilities().escape_identifier + if escape_lit is None: + escape_lit = DestinationCapabilitiesContext.generic_capabilities().escape_literal + + root_table_name = sql_client.make_qualified_table_name(root_table["name"]) + with sql_client.with_staging_dataset(staging=True): + staging_root_table_name = sql_client.make_qualified_table_name(root_table["name"]) + primary_keys = list( + map( + escape_id, + get_columns_names_with_prop(root_table, "primary_key"), + ) + ) + merge_keys = list( + map( + escape_id, + get_columns_names_with_prop(root_table, "merge_key"), + ) + ) + key_clauses = cls._gen_key_table_clauses(primary_keys, merge_keys) + + unique_column: str = None + root_key_column: str = None + + if len(table_chain) == 1: + key_table_clauses = cls.gen_key_table_clauses( + root_table_name, staging_root_table_name, key_clauses, for_delete=True + ) + sql.extend(f"DELETE {clause};" for clause in key_table_clauses) + else: + key_table_clauses = cls.gen_key_table_clauses( + root_table_name, staging_root_table_name, key_clauses, for_delete=False + ) + unique_columns = get_columns_names_with_prop(root_table, "unique") + if not unique_columns: + raise MergeDispositionException( + sql_client.fully_qualified_dataset_name(), + staging_root_table_name, + [t["name"] for t in table_chain], + f"There is no unique column (ie _dlt_id) in top table {root_table['name']} so" + " it is not possible to link child tables to it.", + ) + unique_column = escape_id(unique_columns[0]) + create_delete_temp_table_sql, delete_temp_table_name = cls.gen_delete_temp_table_sql( + unique_column, key_table_clauses + ) + sql.extend(create_delete_temp_table_sql) + + for table in table_chain[1:]: + table_name = sql_client.make_qualified_table_name(table["name"]) + root_key_columns = get_columns_names_with_prop(table, "root_key") + if not root_key_columns: + raise MergeDispositionException( + sql_client.fully_qualified_dataset_name(), + staging_root_table_name, + [t["name"] for t in table_chain], + "There is no root foreign key (ie _dlt_root_id) in child table" + f" {table['name']} so it is not possible to refer to top level table" + f" {root_table['name']} unique column {unique_column}", + ) + root_key_column = escape_id(root_key_columns[0]) + sql.append( + cls.gen_delete_from_sql( + table_name, root_key_column, delete_temp_table_name, unique_column + ) + ) + + sql.append( + cls.gen_delete_from_sql( + root_table_name, unique_column, delete_temp_table_name, unique_column + ) + ) + + not_deleted_cond: str = None + hard_delete_col = get_first_column_name_with_prop(root_table, "hard_delete") + if hard_delete_col is not None: + not_deleted_cond = f"{escape_id(hard_delete_col)} IS NULL" + if root_table["columns"][hard_delete_col]["data_type"] == "bool": + not_deleted_cond += f" OR {escape_id(hard_delete_col)} = {escape_lit(False)}" + + dedup_sort = get_dedup_sort_tuple(root_table) + + insert_temp_table_name: str = None + if len(table_chain) > 1 and (primary_keys or hard_delete_col is not 
None): + condition_columns = [hard_delete_col] if not_deleted_cond is not None else None + ( + create_insert_temp_table_sql, + insert_temp_table_name, + ) = cls.gen_insert_temp_table_sql( + staging_root_table_name, + primary_keys, + unique_column, + dedup_sort, + not_deleted_cond, + condition_columns, + ) + sql.extend(create_insert_temp_table_sql) + + to_delete: List[str] = [] + + for table in table_chain: + table_name = sql_client.make_qualified_table_name(table["name"]) + with sql_client.with_staging_dataset(staging=True): + staging_table_name = sql_client.make_qualified_table_name(table["name"]) + + insert_cond = not_deleted_cond if hard_delete_col is not None else "1 = 1" + if ( + primary_keys + and len(table_chain) > 1 + or not primary_keys + and table.get("parent") is not None + and hard_delete_col is not None + ): + uniq_column = unique_column if table.get("parent") is None else root_key_column + insert_cond = f"{uniq_column} IN (SELECT * FROM {insert_temp_table_name})" + + columns = list(map(escape_id, get_columns_names_with_prop(table, "name"))) + col_str = ", ".join(columns) + select_sql = f"SELECT {col_str} FROM {staging_table_name} WHERE {insert_cond}" + if primary_keys and len(table_chain) == 1: + select_sql = cls.gen_select_from_dedup_sql( + staging_table_name, primary_keys, columns, dedup_sort, insert_cond + ) + + sql.extend([f"INSERT INTO {table_name}({col_str}) {select_sql};"]) + + if table_name is not None and table_name.startswith("delete_"): + to_delete.extend([table_name]) + if insert_temp_table_name is not None and insert_temp_table_name.startswith("delete_"): + to_delete.extend([insert_temp_table_name]) + + # TODO: Doesn't remove all `delete_` tables. + for delete_table_name in to_delete: + sql.extend( + [f"DROP TABLE IF EXISTS {sql_client.make_qualified_table_name(delete_table_name)};"] + ) + + return sql class ClickhouseClient(SqlJobClientWithStaging, SupportsStagingDestination): diff --git a/tests/load/pipeline/test_clickhouse.py b/tests/load/pipeline/test_clickhouse.py index 61d9065af4..1fd834389f 100644 --- a/tests/load/pipeline/test_clickhouse.py +++ b/tests/load/pipeline/test_clickhouse.py @@ -93,7 +93,7 @@ def test_clickhouse_destination_merge(destination_config: DestinationTestConfigu try: - @dlt.resource(name="items", write_disposition="merge") + @dlt.resource(name="items") def items() -> Iterator[TDataItem]: yield { "id": 1, @@ -118,7 +118,7 @@ def items() -> Iterator[TDataItem]: assert table_counts["_dlt_loads"] == 1 # Load again with schema evolution. 
- @dlt.resource(name="items", write_disposition="merge") + @dlt.resource(name="items", write_disposition="merge", primary_key="id") def items2() -> Iterator[TDataItem]: yield { "id": 1, From 75a61ebb9f63c1ff4aa7bee222ad44e31a9da903 Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Tue, 2 Apr 2024 16:42:41 +0200 Subject: [PATCH 041/127] Fall back to append disposition for merge #1055 Signed-off-by: Marcel Coetzee --- .../impl/clickhouse/clickhouse.py | 180 ++---------------- tests/load/pipeline/test_clickhouse.py | 71 ------- 2 files changed, 13 insertions(+), 238 deletions(-) diff --git a/dlt/destinations/impl/clickhouse/clickhouse.py b/dlt/destinations/impl/clickhouse/clickhouse.py index ae2f8a10e1..4fb97e51d2 100644 --- a/dlt/destinations/impl/clickhouse/clickhouse.py +++ b/dlt/destinations/impl/clickhouse/clickhouse.py @@ -27,13 +27,7 @@ TTableSchemaColumns, TColumnSchemaBase, ) -from dlt.common.schema.utils import ( - get_columns_names_with_prop, - get_first_column_name_with_prop, - get_dedup_sort_tuple, -) from dlt.common.storages import FileStorage -from dlt.destinations.exceptions import MergeDispositionException from dlt.destinations.impl.clickhouse import capabilities from dlt.destinations.impl.clickhouse.clickhouse_adapter import ( TTableEngineType, @@ -54,7 +48,7 @@ SqlJobClientBase, ) from dlt.destinations.job_impl import NewReferenceJob, EmptyLoadJob -from dlt.destinations.sql_jobs import SqlMergeJob +from dlt.destinations.sql_jobs import SqlStagingCopyJob from dlt.destinations.type_mapping import TypeMapper @@ -206,165 +200,6 @@ def exception(self) -> str: raise NotImplementedError() -class ClickhouseMergeJob(SqlMergeJob): - @classmethod - def _to_temp_table(cls, select_sql: str, temp_table_name: str) -> str: - # Different sessions are created during the load process, and temporary tables - # do not persist between sessions. - # Resorting to persisted in-memory table to fix. 
- # return f"CREATE TABLE {temp_table_name} ENGINE = Memory AS {select_sql};" - return f"CREATE TABLE {temp_table_name} ENGINE = Memory AS {select_sql};" - - @classmethod - def gen_merge_sql( - cls, table_chain: Sequence[TTableSchema], sql_client: ClickhouseSqlClient # type: ignore[override] - ) -> List[str]: - sql: List[str] = [] - root_table = table_chain[0] - - escape_id = sql_client.capabilities.escape_identifier - escape_lit = sql_client.capabilities.escape_literal - if escape_id is None: - escape_id = DestinationCapabilitiesContext.generic_capabilities().escape_identifier - if escape_lit is None: - escape_lit = DestinationCapabilitiesContext.generic_capabilities().escape_literal - - root_table_name = sql_client.make_qualified_table_name(root_table["name"]) - with sql_client.with_staging_dataset(staging=True): - staging_root_table_name = sql_client.make_qualified_table_name(root_table["name"]) - primary_keys = list( - map( - escape_id, - get_columns_names_with_prop(root_table, "primary_key"), - ) - ) - merge_keys = list( - map( - escape_id, - get_columns_names_with_prop(root_table, "merge_key"), - ) - ) - key_clauses = cls._gen_key_table_clauses(primary_keys, merge_keys) - - unique_column: str = None - root_key_column: str = None - - if len(table_chain) == 1: - key_table_clauses = cls.gen_key_table_clauses( - root_table_name, staging_root_table_name, key_clauses, for_delete=True - ) - sql.extend(f"DELETE {clause};" for clause in key_table_clauses) - else: - key_table_clauses = cls.gen_key_table_clauses( - root_table_name, staging_root_table_name, key_clauses, for_delete=False - ) - unique_columns = get_columns_names_with_prop(root_table, "unique") - if not unique_columns: - raise MergeDispositionException( - sql_client.fully_qualified_dataset_name(), - staging_root_table_name, - [t["name"] for t in table_chain], - f"There is no unique column (ie _dlt_id) in top table {root_table['name']} so" - " it is not possible to link child tables to it.", - ) - unique_column = escape_id(unique_columns[0]) - create_delete_temp_table_sql, delete_temp_table_name = cls.gen_delete_temp_table_sql( - unique_column, key_table_clauses - ) - sql.extend(create_delete_temp_table_sql) - - for table in table_chain[1:]: - table_name = sql_client.make_qualified_table_name(table["name"]) - root_key_columns = get_columns_names_with_prop(table, "root_key") - if not root_key_columns: - raise MergeDispositionException( - sql_client.fully_qualified_dataset_name(), - staging_root_table_name, - [t["name"] for t in table_chain], - "There is no root foreign key (ie _dlt_root_id) in child table" - f" {table['name']} so it is not possible to refer to top level table" - f" {root_table['name']} unique column {unique_column}", - ) - root_key_column = escape_id(root_key_columns[0]) - sql.append( - cls.gen_delete_from_sql( - table_name, root_key_column, delete_temp_table_name, unique_column - ) - ) - - sql.append( - cls.gen_delete_from_sql( - root_table_name, unique_column, delete_temp_table_name, unique_column - ) - ) - - not_deleted_cond: str = None - hard_delete_col = get_first_column_name_with_prop(root_table, "hard_delete") - if hard_delete_col is not None: - not_deleted_cond = f"{escape_id(hard_delete_col)} IS NULL" - if root_table["columns"][hard_delete_col]["data_type"] == "bool": - not_deleted_cond += f" OR {escape_id(hard_delete_col)} = {escape_lit(False)}" - - dedup_sort = get_dedup_sort_tuple(root_table) - - insert_temp_table_name: str = None - if len(table_chain) > 1 and (primary_keys or hard_delete_col is not 
None): - condition_columns = [hard_delete_col] if not_deleted_cond is not None else None - ( - create_insert_temp_table_sql, - insert_temp_table_name, - ) = cls.gen_insert_temp_table_sql( - staging_root_table_name, - primary_keys, - unique_column, - dedup_sort, - not_deleted_cond, - condition_columns, - ) - sql.extend(create_insert_temp_table_sql) - - to_delete: List[str] = [] - - for table in table_chain: - table_name = sql_client.make_qualified_table_name(table["name"]) - with sql_client.with_staging_dataset(staging=True): - staging_table_name = sql_client.make_qualified_table_name(table["name"]) - - insert_cond = not_deleted_cond if hard_delete_col is not None else "1 = 1" - if ( - primary_keys - and len(table_chain) > 1 - or not primary_keys - and table.get("parent") is not None - and hard_delete_col is not None - ): - uniq_column = unique_column if table.get("parent") is None else root_key_column - insert_cond = f"{uniq_column} IN (SELECT * FROM {insert_temp_table_name})" - - columns = list(map(escape_id, get_columns_names_with_prop(table, "name"))) - col_str = ", ".join(columns) - select_sql = f"SELECT {col_str} FROM {staging_table_name} WHERE {insert_cond}" - if primary_keys and len(table_chain) == 1: - select_sql = cls.gen_select_from_dedup_sql( - staging_table_name, primary_keys, columns, dedup_sort, insert_cond - ) - - sql.extend([f"INSERT INTO {table_name}({col_str}) {select_sql};"]) - - if table_name is not None and table_name.startswith("delete_"): - to_delete.extend([table_name]) - if insert_temp_table_name is not None and insert_temp_table_name.startswith("delete_"): - to_delete.extend([insert_temp_table_name]) - - # TODO: Doesn't remove all `delete_` tables. - for delete_table_name in to_delete: - sql.extend( - [f"DROP TABLE IF EXISTS {sql_client.make_qualified_table_name(delete_table_name)};"] - ) - - return sql - - class ClickhouseClient(SqlJobClientWithStaging, SupportsStagingDestination): capabilities: ClassVar[DestinationCapabilitiesContext] = capabilities() @@ -484,5 +319,16 @@ def _from_db_type( def restore_file_load(self, file_path: str) -> LoadJob: return EmptyLoadJob.from_file_path(file_path, "completed") + def _create_append_followup_jobs(self, table_chain: Sequence[TTableSchema]) -> List[NewLoadJob]: + return [ + SqlStagingCopyJob.from_table_chain(table_chain, self.sql_client, {"replace": False}) + ] + + def _create_replace_followup_jobs( + self, table_chain: Sequence[TTableSchema] + ) -> List[NewLoadJob]: + return [SqlStagingCopyJob.from_table_chain(table_chain, self.sql_client, {"replace": True})] + def _create_merge_followup_jobs(self, table_chain: Sequence[TTableSchema]) -> List[NewLoadJob]: - return [ClickhouseMergeJob.from_table_chain(table_chain, self.sql_client)] + # Fall back to append jobs for merge. 
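+        # A resource declared with the merge disposition, for example
+        #   @dlt.resource(name="items", write_disposition="merge", primary_key="id")
+        # is therefore loaded with the same staging-copy jobs as append, so new rows
+        # are inserted without being deduplicated against existing ones.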
+ return self._create_append_followup_jobs(table_chain) diff --git a/tests/load/pipeline/test_clickhouse.py b/tests/load/pipeline/test_clickhouse.py index 1fd834389f..4a5903137b 100644 --- a/tests/load/pipeline/test_clickhouse.py +++ b/tests/load/pipeline/test_clickhouse.py @@ -81,74 +81,3 @@ def items2() -> Iterator[TDataItem]: finally: with pipeline.sql_client() as client: client.drop_dataset() - - -@pytest.mark.parametrize( - "destination_config", - destinations_configs(all_staging_configs=True, subset=["clickhouse"]), - ids=lambda x: x.name, -) -def test_clickhouse_destination_merge(destination_config: DestinationTestConfiguration) -> None: - pipeline = destination_config.setup_pipeline(f"clickhouse_{uniq_id()}", full_refresh=True) - - try: - - @dlt.resource(name="items") - def items() -> Iterator[TDataItem]: - yield { - "id": 1, - "name": "item", - "sub_items": [ - {"id": 101, "name": "sub item 101"}, - {"id": 101, "name": "sub item 102"}, - ], - } - - pipeline.run( - items, - loader_file_format=destination_config.file_format, - staging=destination_config.staging, - ) - - table_counts = load_table_counts( - pipeline, *[t["name"] for t in pipeline.default_schema._schema_tables.values()] - ) - assert table_counts["items"] == 1 - assert table_counts["items__sub_items"] == 2 - assert table_counts["_dlt_loads"] == 1 - - # Load again with schema evolution. - @dlt.resource(name="items", write_disposition="merge", primary_key="id") - def items2() -> Iterator[TDataItem]: - yield { - "id": 1, - "name": "item", - "new_field": "hello", - "sub_items": [ - { - "id": 101, - "name": "sub item 101", - "other_new_field": "hello 101", - }, - { - "id": 101, - "name": "sub item 102", - "other_new_field": "hello 102", - }, - ], - } - - pipeline.run(items2) - table_counts = load_table_counts( - pipeline, *[t["name"] for t in pipeline.default_schema._schema_tables.values()] - ) - assert table_counts["items"] == 1 - assert table_counts["items__sub_items"] == 2 - assert table_counts["_dlt_loads"] == 2 - - except Exception as e: - raise e - - finally: - with pipeline.sql_client() as client: - client.drop_dataset() From a4ccd2b8eed3f30fac496ca4a0202af1c0c6552f Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Tue, 2 Apr 2024 19:52:56 +0200 Subject: [PATCH 042/127] Clickhouse CI #1055 Signed-off-by: Marcel Coetzee --- .../workflows/test_destination_clickhouse.yml | 92 +++++++++++++++++++ 1 file changed, 92 insertions(+) create mode 100644 .github/workflows/test_destination_clickhouse.yml diff --git a/.github/workflows/test_destination_clickhouse.yml b/.github/workflows/test_destination_clickhouse.yml new file mode 100644 index 0000000000..62f83e9d29 --- /dev/null +++ b/.github/workflows/test_destination_clickhouse.yml @@ -0,0 +1,92 @@ +name: test clickhouse + +on: + pull_request: + branches: + - master + - devel + workflow_dispatch: + +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + +env: + DLT_SECRETS_TOML: ${{ secrets.DLT_SECRETS_TOML }} + + RUNTIME__SENTRY_DSN: https://6f6f7b6f8e0f458a89be4187603b55fe@o1061158.ingest.sentry.io/4504819859914752 + RUNTIME__LOG_LEVEL: ERROR + + ACTIVE_DESTINATIONS: "[\"bigquery\"]" + ALL_FILESYSTEM_DRIVERS: "[\"memory\"]" + +jobs: + get_docs_changes: + uses: ./.github/workflows/get_docs_changes.yml + if: ${{ !github.event.pull_request.head.repo.fork }} + + run_loader: + name: Tests Clickhouse loader + needs: get_docs_changes + if: needs.get_docs_changes.outputs.changes_outside_docs == 'true' + 
strategy: + fail-fast: false + matrix: + os: [ "ubuntu-latest" ] + # os: ["ubuntu-latest", "macos-latest", "windows-latest"] + defaults: + run: + shell: bash + runs-on: ${{ matrix.os }} + + steps: + + - name: Check out + uses: actions/checkout@master + + - name: Setup Python + uses: actions/setup-python@v4 + with: + python-version: "3.10.x" + + - name: Install Poetry + uses: snok/install-poetry@v1.3.2 + with: + virtualenvs-create: true + virtualenvs-in-project: true + installer-parallel: true + + - name: Load cached venv + id: cached-poetry-dependencies + uses: actions/cache@v3 + with: # path: ${{ steps.pip-cache.outputs.dir }} + path: .venv + key: venv-${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-${{ hashFiles('**/poetry.lock') }}-gcp + + - name: Install dependencies + # if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true' + run: poetry install --no-interaction -E clickhouse --with providers -E parquet --with sentry-sdk --with pipeline + + - name: create secrets.toml + run: pwd && echo "$DLT_SECRETS_TOML" > tests/.dlt/secrets.toml + + - run: | + poetry run pytest tests/helpers/providers tests/load + if: runner.os != 'Windows' + name: Run tests Linux/MAC + - run: | + poetry run pytest tests/helpers/providers tests/load + if: runner.os == 'Windows' + name: Run tests Windows + shell: cmd + + matrix_job_required_check: + name: Clickhouse loader tests + needs: run_loader + runs-on: ubuntu-latest + if: always() + steps: + - name: Check matrix job results + if: contains(needs.*.result, 'failure') || contains(needs.*.result, 'cancelled') + run: | + echo "One or more matrix job tests failed or were cancelled. You may need to re-run them." && exit 1 From 7f730c1ce58b7113c172f95407d5db695f6282cc Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Tue, 2 Apr 2024 20:30:57 +0200 Subject: [PATCH 043/127] Update active destinations to ClickHouse Signed-off-by: Marcel Coetzee --- .github/workflows/test_destination_clickhouse.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test_destination_clickhouse.yml b/.github/workflows/test_destination_clickhouse.yml index 62f83e9d29..57cb9f953f 100644 --- a/.github/workflows/test_destination_clickhouse.yml +++ b/.github/workflows/test_destination_clickhouse.yml @@ -17,7 +17,7 @@ env: RUNTIME__SENTRY_DSN: https://6f6f7b6f8e0f458a89be4187603b55fe@o1061158.ingest.sentry.io/4504819859914752 RUNTIME__LOG_LEVEL: ERROR - ACTIVE_DESTINATIONS: "[\"bigquery\"]" + ACTIVE_DESTINATIONS: "[\"clickhouse\"]" ALL_FILESYSTEM_DRIVERS: "[\"memory\"]" jobs: From dc47c12b633f2a57224f1f999cc242ccaf7afd3a Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Tue, 2 Apr 2024 20:35:42 +0200 Subject: [PATCH 044/127] Expand Clickhouse dependencies in pyproject.toml Signed-off-by: Marcel Coetzee --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 213db8d01d..fd497513ad 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -104,7 +104,7 @@ mssql = ["pyodbc"] synapse = ["pyodbc", "adlfs", "pyarrow"] qdrant = ["qdrant-client"] databricks = ["databricks-sql-connector"] -clickhouse = ["clickhouse-driver"] +clickhouse = ["clickhouse-driver", "s3fs", "gcsfs", "pyarrow", "adlfs"] [tool.poetry.scripts] dlt = "dlt.cli._dlt:_main" From 34646ad044eaad8c417cf2f780dda0ab5e967bf3 Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Tue, 2 Apr 2024 20:36:49 +0200 Subject: [PATCH 045/127] Update lock file #1055 Signed-off-by: Marcel Coetzee --- poetry.lock | 4 ++-- 1 file 
changed, 2 insertions(+), 2 deletions(-) diff --git a/poetry.lock b/poetry.lock index 05025b827b..b38984b03c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -9085,7 +9085,7 @@ athena = ["botocore", "pyarrow", "pyathena", "s3fs"] az = ["adlfs"] bigquery = ["gcsfs", "google-cloud-bigquery", "grpcio", "pyarrow"] cli = ["cron-descriptor", "pipdeptree"] -clickhouse = ["clickhouse-driver"] +clickhouse = ["adlfs", "clickhouse-driver", "gcsfs", "pyarrow", "s3fs"] databricks = ["databricks-sql-connector"] dbt = ["dbt-athena-community", "dbt-bigquery", "dbt-core", "dbt-databricks", "dbt-duckdb", "dbt-redshift", "dbt-snowflake"] duckdb = ["duckdb", "duckdb"] @@ -9106,4 +9106,4 @@ weaviate = ["weaviate-client"] [metadata] lock-version = "2.0" python-versions = ">=3.8.1,<3.13" -content-hash = "689daf5e8e7a187e615f4055009988b80a783d2aeed6f2264c7503668433f02c" +content-hash = "b3976f6c6626123bd0c102534cf01ad7f72682699c3d0380c480531b00c20663" From 8bc3c21f8bb61393c0030027e38c2780a33fcc9b Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Wed, 3 Apr 2024 13:34:11 +0200 Subject: [PATCH 046/127] Revert back to merge implementation #1055 Signed-off-by: Marcel Coetzee --- .../impl/clickhouse/clickhouse.py | 180 ++++++++++++++++-- tests/load/pipeline/test_clickhouse.py | 71 +++++++ 2 files changed, 238 insertions(+), 13 deletions(-) diff --git a/dlt/destinations/impl/clickhouse/clickhouse.py b/dlt/destinations/impl/clickhouse/clickhouse.py index 4fb97e51d2..ae2f8a10e1 100644 --- a/dlt/destinations/impl/clickhouse/clickhouse.py +++ b/dlt/destinations/impl/clickhouse/clickhouse.py @@ -27,7 +27,13 @@ TTableSchemaColumns, TColumnSchemaBase, ) +from dlt.common.schema.utils import ( + get_columns_names_with_prop, + get_first_column_name_with_prop, + get_dedup_sort_tuple, +) from dlt.common.storages import FileStorage +from dlt.destinations.exceptions import MergeDispositionException from dlt.destinations.impl.clickhouse import capabilities from dlt.destinations.impl.clickhouse.clickhouse_adapter import ( TTableEngineType, @@ -48,7 +54,7 @@ SqlJobClientBase, ) from dlt.destinations.job_impl import NewReferenceJob, EmptyLoadJob -from dlt.destinations.sql_jobs import SqlStagingCopyJob +from dlt.destinations.sql_jobs import SqlMergeJob from dlt.destinations.type_mapping import TypeMapper @@ -200,6 +206,165 @@ def exception(self) -> str: raise NotImplementedError() +class ClickhouseMergeJob(SqlMergeJob): + @classmethod + def _to_temp_table(cls, select_sql: str, temp_table_name: str) -> str: + # Different sessions are created during the load process, and temporary tables + # do not persist between sessions. + # Resorting to persisted in-memory table to fix. 
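+        # A Memory-engine table is a regular server-wide table held in RAM (unlike a
+        # TEMPORARY table, which is scoped to one session), so it stays visible to the
+        # later merge statements even when they run on a different session.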
+ # return f"CREATE TABLE {temp_table_name} ENGINE = Memory AS {select_sql};" + return f"CREATE TABLE {temp_table_name} ENGINE = Memory AS {select_sql};" + + @classmethod + def gen_merge_sql( + cls, table_chain: Sequence[TTableSchema], sql_client: ClickhouseSqlClient # type: ignore[override] + ) -> List[str]: + sql: List[str] = [] + root_table = table_chain[0] + + escape_id = sql_client.capabilities.escape_identifier + escape_lit = sql_client.capabilities.escape_literal + if escape_id is None: + escape_id = DestinationCapabilitiesContext.generic_capabilities().escape_identifier + if escape_lit is None: + escape_lit = DestinationCapabilitiesContext.generic_capabilities().escape_literal + + root_table_name = sql_client.make_qualified_table_name(root_table["name"]) + with sql_client.with_staging_dataset(staging=True): + staging_root_table_name = sql_client.make_qualified_table_name(root_table["name"]) + primary_keys = list( + map( + escape_id, + get_columns_names_with_prop(root_table, "primary_key"), + ) + ) + merge_keys = list( + map( + escape_id, + get_columns_names_with_prop(root_table, "merge_key"), + ) + ) + key_clauses = cls._gen_key_table_clauses(primary_keys, merge_keys) + + unique_column: str = None + root_key_column: str = None + + if len(table_chain) == 1: + key_table_clauses = cls.gen_key_table_clauses( + root_table_name, staging_root_table_name, key_clauses, for_delete=True + ) + sql.extend(f"DELETE {clause};" for clause in key_table_clauses) + else: + key_table_clauses = cls.gen_key_table_clauses( + root_table_name, staging_root_table_name, key_clauses, for_delete=False + ) + unique_columns = get_columns_names_with_prop(root_table, "unique") + if not unique_columns: + raise MergeDispositionException( + sql_client.fully_qualified_dataset_name(), + staging_root_table_name, + [t["name"] for t in table_chain], + f"There is no unique column (ie _dlt_id) in top table {root_table['name']} so" + " it is not possible to link child tables to it.", + ) + unique_column = escape_id(unique_columns[0]) + create_delete_temp_table_sql, delete_temp_table_name = cls.gen_delete_temp_table_sql( + unique_column, key_table_clauses + ) + sql.extend(create_delete_temp_table_sql) + + for table in table_chain[1:]: + table_name = sql_client.make_qualified_table_name(table["name"]) + root_key_columns = get_columns_names_with_prop(table, "root_key") + if not root_key_columns: + raise MergeDispositionException( + sql_client.fully_qualified_dataset_name(), + staging_root_table_name, + [t["name"] for t in table_chain], + "There is no root foreign key (ie _dlt_root_id) in child table" + f" {table['name']} so it is not possible to refer to top level table" + f" {root_table['name']} unique column {unique_column}", + ) + root_key_column = escape_id(root_key_columns[0]) + sql.append( + cls.gen_delete_from_sql( + table_name, root_key_column, delete_temp_table_name, unique_column + ) + ) + + sql.append( + cls.gen_delete_from_sql( + root_table_name, unique_column, delete_temp_table_name, unique_column + ) + ) + + not_deleted_cond: str = None + hard_delete_col = get_first_column_name_with_prop(root_table, "hard_delete") + if hard_delete_col is not None: + not_deleted_cond = f"{escape_id(hard_delete_col)} IS NULL" + if root_table["columns"][hard_delete_col]["data_type"] == "bool": + not_deleted_cond += f" OR {escape_id(hard_delete_col)} = {escape_lit(False)}" + + dedup_sort = get_dedup_sort_tuple(root_table) + + insert_temp_table_name: str = None + if len(table_chain) > 1 and (primary_keys or hard_delete_col is not 
None): + condition_columns = [hard_delete_col] if not_deleted_cond is not None else None + ( + create_insert_temp_table_sql, + insert_temp_table_name, + ) = cls.gen_insert_temp_table_sql( + staging_root_table_name, + primary_keys, + unique_column, + dedup_sort, + not_deleted_cond, + condition_columns, + ) + sql.extend(create_insert_temp_table_sql) + + to_delete: List[str] = [] + + for table in table_chain: + table_name = sql_client.make_qualified_table_name(table["name"]) + with sql_client.with_staging_dataset(staging=True): + staging_table_name = sql_client.make_qualified_table_name(table["name"]) + + insert_cond = not_deleted_cond if hard_delete_col is not None else "1 = 1" + if ( + primary_keys + and len(table_chain) > 1 + or not primary_keys + and table.get("parent") is not None + and hard_delete_col is not None + ): + uniq_column = unique_column if table.get("parent") is None else root_key_column + insert_cond = f"{uniq_column} IN (SELECT * FROM {insert_temp_table_name})" + + columns = list(map(escape_id, get_columns_names_with_prop(table, "name"))) + col_str = ", ".join(columns) + select_sql = f"SELECT {col_str} FROM {staging_table_name} WHERE {insert_cond}" + if primary_keys and len(table_chain) == 1: + select_sql = cls.gen_select_from_dedup_sql( + staging_table_name, primary_keys, columns, dedup_sort, insert_cond + ) + + sql.extend([f"INSERT INTO {table_name}({col_str}) {select_sql};"]) + + if table_name is not None and table_name.startswith("delete_"): + to_delete.extend([table_name]) + if insert_temp_table_name is not None and insert_temp_table_name.startswith("delete_"): + to_delete.extend([insert_temp_table_name]) + + # TODO: Doesn't remove all `delete_` tables. + for delete_table_name in to_delete: + sql.extend( + [f"DROP TABLE IF EXISTS {sql_client.make_qualified_table_name(delete_table_name)};"] + ) + + return sql + + class ClickhouseClient(SqlJobClientWithStaging, SupportsStagingDestination): capabilities: ClassVar[DestinationCapabilitiesContext] = capabilities() @@ -319,16 +484,5 @@ def _from_db_type( def restore_file_load(self, file_path: str) -> LoadJob: return EmptyLoadJob.from_file_path(file_path, "completed") - def _create_append_followup_jobs(self, table_chain: Sequence[TTableSchema]) -> List[NewLoadJob]: - return [ - SqlStagingCopyJob.from_table_chain(table_chain, self.sql_client, {"replace": False}) - ] - - def _create_replace_followup_jobs( - self, table_chain: Sequence[TTableSchema] - ) -> List[NewLoadJob]: - return [SqlStagingCopyJob.from_table_chain(table_chain, self.sql_client, {"replace": True})] - def _create_merge_followup_jobs(self, table_chain: Sequence[TTableSchema]) -> List[NewLoadJob]: - # Fall back to append jobs for merge. 
- return self._create_append_followup_jobs(table_chain) + return [ClickhouseMergeJob.from_table_chain(table_chain, self.sql_client)] diff --git a/tests/load/pipeline/test_clickhouse.py b/tests/load/pipeline/test_clickhouse.py index 4a5903137b..1fd834389f 100644 --- a/tests/load/pipeline/test_clickhouse.py +++ b/tests/load/pipeline/test_clickhouse.py @@ -81,3 +81,74 @@ def items2() -> Iterator[TDataItem]: finally: with pipeline.sql_client() as client: client.drop_dataset() + + +@pytest.mark.parametrize( + "destination_config", + destinations_configs(all_staging_configs=True, subset=["clickhouse"]), + ids=lambda x: x.name, +) +def test_clickhouse_destination_merge(destination_config: DestinationTestConfiguration) -> None: + pipeline = destination_config.setup_pipeline(f"clickhouse_{uniq_id()}", full_refresh=True) + + try: + + @dlt.resource(name="items") + def items() -> Iterator[TDataItem]: + yield { + "id": 1, + "name": "item", + "sub_items": [ + {"id": 101, "name": "sub item 101"}, + {"id": 101, "name": "sub item 102"}, + ], + } + + pipeline.run( + items, + loader_file_format=destination_config.file_format, + staging=destination_config.staging, + ) + + table_counts = load_table_counts( + pipeline, *[t["name"] for t in pipeline.default_schema._schema_tables.values()] + ) + assert table_counts["items"] == 1 + assert table_counts["items__sub_items"] == 2 + assert table_counts["_dlt_loads"] == 1 + + # Load again with schema evolution. + @dlt.resource(name="items", write_disposition="merge", primary_key="id") + def items2() -> Iterator[TDataItem]: + yield { + "id": 1, + "name": "item", + "new_field": "hello", + "sub_items": [ + { + "id": 101, + "name": "sub item 101", + "other_new_field": "hello 101", + }, + { + "id": 101, + "name": "sub item 102", + "other_new_field": "hello 102", + }, + ], + } + + pipeline.run(items2) + table_counts = load_table_counts( + pipeline, *[t["name"] for t in pipeline.default_schema._schema_tables.values()] + ) + assert table_counts["items"] == 1 + assert table_counts["items__sub_items"] == 2 + assert table_counts["_dlt_loads"] == 2 + + except Exception as e: + raise e + + finally: + with pipeline.sql_client() as client: + client.drop_dataset() From 99e82ffd13bde850f2aa917ed486f133cda2efe1 Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Wed, 3 Apr 2024 13:47:44 +0200 Subject: [PATCH 047/127] Add default sql test #1055 Signed-off-by: Marcel Coetzee --- tests/load/pipeline/test_clickhouse.py | 4 ++-- tests/load/utils.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/load/pipeline/test_clickhouse.py b/tests/load/pipeline/test_clickhouse.py index 1fd834389f..a06277ef80 100644 --- a/tests/load/pipeline/test_clickhouse.py +++ b/tests/load/pipeline/test_clickhouse.py @@ -14,7 +14,7 @@ @pytest.mark.parametrize( "destination_config", - destinations_configs(all_staging_configs=True, subset=["clickhouse"]), + destinations_configs(default_sql_configs=True, all_staging_configs=True, subset=["clickhouse"]), ids=lambda x: x.name, ) def test_clickhouse_destination_append(destination_config: DestinationTestConfiguration) -> None: @@ -85,7 +85,7 @@ def items2() -> Iterator[TDataItem]: @pytest.mark.parametrize( "destination_config", - destinations_configs(all_staging_configs=True, subset=["clickhouse"]), + destinations_configs(default_sql_configs=True, all_staging_configs=True, subset=["clickhouse"]), ids=lambda x: x.name, ) def test_clickhouse_destination_merge(destination_config: DestinationTestConfiguration) -> None: diff --git a/tests/load/utils.py 
b/tests/load/utils.py index b043a979cb..5e376139b6 100644 --- a/tests/load/utils.py +++ b/tests/load/utils.py @@ -172,7 +172,7 @@ def destinations_configs( destination_configs += [ DestinationTestConfiguration(destination=destination) for destination in SQL_DESTINATIONS - if destination not in ("athena", "mssql", "synapse", "databricks", "clickhouse") + if destination not in ("athena", "mssql", "synapse", "databricks") ] destination_configs += [ DestinationTestConfiguration(destination="duckdb", file_format="parquet") From 6bded9a592475bd9ecfd2c821e111f9e9118e5dd Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Wed, 3 Apr 2024 14:11:13 +0200 Subject: [PATCH 048/127] Support jsonlines Signed-off-by: Marcel Coetzee --- dlt/destinations/impl/clickhouse/__init__.py | 8 ++--- tests/load/utils.py | 32 ++++++++++++++++++++ 2 files changed, 36 insertions(+), 4 deletions(-) diff --git a/dlt/destinations/impl/clickhouse/__init__.py b/dlt/destinations/impl/clickhouse/__init__.py index 445a2f12a9..c621f8db43 100644 --- a/dlt/destinations/impl/clickhouse/__init__.py +++ b/dlt/destinations/impl/clickhouse/__init__.py @@ -7,11 +7,11 @@ def capabilities() -> DestinationCapabilitiesContext: caps = DestinationCapabilitiesContext() - - caps.preferred_loader_file_format = "parquet" - caps.supported_loader_file_formats = ["parquet"] + # Clickhouse only supports loading from staged files on s3 for now. + caps.preferred_loader_file_format = "insert_values" + caps.supported_loader_file_formats = ["parquet", "jsonl", "insert_values"] caps.preferred_staging_file_format = "parquet" - caps.supported_staging_file_formats = ["parquet"] + caps.supported_staging_file_formats = ["parquet", "jsonl"] caps.escape_identifier = escape_clickhouse_identifier caps.escape_literal = escape_clickhouse_literal diff --git a/tests/load/utils.py b/tests/load/utils.py index 5e376139b6..b9ac349f42 100644 --- a/tests/load/utils.py +++ b/tests/load/utils.py @@ -197,6 +197,14 @@ def destinations_configs( extra_info="iceberg", ) ] + destination_configs += [ + DestinationTestConfiguration( + destination="clickhouse", + file_format="parquet", + bucket_url=AWS_BUCKET, + extra_info="s3-authorization", + ) + ] destination_configs += [ DestinationTestConfiguration( destination="databricks", @@ -320,6 +328,30 @@ def destinations_configs( bucket_url=AZ_BUCKET, extra_info="az-authorization", ), + DestinationTestConfiguration( + destination="clickhouse", + staging="filesystem", + file_format="jsonl", + bucket_url=AZ_BUCKET, + extra_info="az-authorization", + disable_compression=True, + ), + DestinationTestConfiguration( + destination="clickhouse", + staging="filesystem", + file_format="jsonl", + bucket_url=GCS_BUCKET, + extra_info="gcs-authorization", + disable_compression=True, + ), + DestinationTestConfiguration( + destination="clickhouse", + staging="filesystem", + file_format="jsonl", + bucket_url=AWS_BUCKET, + extra_info="s3-authorization", + disable_compression=True, + ), ] if all_staging_configs: From eca4d2d9d04a6ea9925e0ac2496cc7ac5b8292ab Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Wed, 3 Apr 2024 17:25:17 +0200 Subject: [PATCH 049/127] Revert non-applicable changes #1055 Signed-off-by: Marcel Coetzee --- dlt/destinations/impl/clickhouse/__init__.py | 4 +- .../impl/clickhouse/clickhouse.py | 64 ++++++++++--------- .../impl/clickhouse/sql_client.py | 5 +- dlt/sources/helpers/rest_client/paginators.py | 15 +++++ 4 files changed, 54 insertions(+), 34 deletions(-) diff --git a/dlt/destinations/impl/clickhouse/__init__.py 
b/dlt/destinations/impl/clickhouse/__init__.py index c621f8db43..035e799d38 100644 --- a/dlt/destinations/impl/clickhouse/__init__.py +++ b/dlt/destinations/impl/clickhouse/__init__.py @@ -8,9 +8,9 @@ def capabilities() -> DestinationCapabilitiesContext: caps = DestinationCapabilitiesContext() # Clickhouse only supports loading from staged files on s3 for now. - caps.preferred_loader_file_format = "insert_values" + caps.preferred_loader_file_format = "jsonl" caps.supported_loader_file_formats = ["parquet", "jsonl", "insert_values"] - caps.preferred_staging_file_format = "parquet" + caps.preferred_staging_file_format = "jsonl" caps.supported_staging_file_formats = ["parquet", "jsonl"] caps.escape_identifier = escape_clickhouse_identifier diff --git a/dlt/destinations/impl/clickhouse/clickhouse.py b/dlt/destinations/impl/clickhouse/clickhouse.py index ae2f8a10e1..4910b9c0f6 100644 --- a/dlt/destinations/impl/clickhouse/clickhouse.py +++ b/dlt/destinations/impl/clickhouse/clickhouse.py @@ -49,8 +49,8 @@ FILE_FORMAT_TO_TABLE_FUNCTION_MAPPING, SUPPORTED_FILE_FORMATS, ) +from dlt.destinations.insert_job_client import InsertValuesJobClient from dlt.destinations.job_client_impl import ( - SqlJobClientWithStaging, SqlJobClientBase, ) from dlt.destinations.job_impl import NewReferenceJob, EmptyLoadJob @@ -365,7 +365,7 @@ def gen_merge_sql( return sql -class ClickhouseClient(SqlJobClientWithStaging, SupportsStagingDestination): +class ClickhouseClient(InsertValuesJobClient, SupportsStagingDestination): capabilities: ClassVar[DestinationCapabilitiesContext] = capabilities() def __init__( @@ -381,8 +381,34 @@ def __init__( self.active_hints = deepcopy(HINT_TO_CLICKHOUSE_ATTR) self.type_mapper = ClickhouseTypeMapper(self.capabilities) + def _create_merge_followup_jobs(self, table_chain: Sequence[TTableSchema]) -> List[NewLoadJob]: + return [ClickhouseMergeJob.from_table_chain(table_chain, self.sql_client)] + + def _get_column_def_sql(self, c: TColumnSchema, table_format: TTableFormat = None) -> str: + # Build column definition. + # The primary key and sort order definition is defined outside column specification. + hints_str = " ".join( + self.active_hints.get(hint) + for hint in self.active_hints.keys() + if c.get(hint, False) is True + and hint not in ("primary_key", "sort") + and hint in self.active_hints + ) + + # Alter table statements only accept `Nullable` modifiers. + type_with_nullability_modifier = ( + f"Nullable({self.type_mapper.to_db_type(c)})" + if c.get("nullable", True) + else self.type_mapper.to_db_type(c) + ) + + return ( + f"{self.capabilities.escape_identifier(c['name'])} {type_with_nullability_modifier} {hints_str}" + .strip() + ) + def start_file_load(self, table: TTableSchema, file_path: str, load_id: str) -> LoadJob: - return super().start_file_load(table, file_path, load_id) or ClickhouseLoadJob( + job = super().start_file_load(table, file_path, load_id) or ClickhouseLoadJob( file_path, table["name"], self.sql_client, @@ -390,6 +416,11 @@ def start_file_load(self, table: TTableSchema, file_path: str, load_id: str) -> self.config.staging_config.credentials if self.config.staging_config else None ), ) + if not job: + assert NewReferenceJob.is_reference_job( + file_path + ), "Clickhouse must use staging to load files." 
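+            # A reference job file carries only a pointer (not the data itself) to a file
+            # that the staging destination has already uploaded, hence the requirement
+            # stated in the assertion message.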
+ return job def _get_table_update_sql( self, table_name: str, new_columns: Sequence[TColumnSchema], generate_alter: bool @@ -419,29 +450,6 @@ def _get_table_update_sql( return sql - def _get_column_def_sql(self, c: TColumnSchema, table_format: TTableFormat = None) -> str: - # Build column definition. - # The primary key and sort order definition is defined outside column specification. - hints_str = " ".join( - self.active_hints.get(hint) - for hint in self.active_hints.keys() - if c.get(hint, False) is True - and hint not in ("primary_key", "sort") - and hint in self.active_hints - ) - - # Alter table statements only accept `Nullable` modifiers. - type_with_nullability_modifier = ( - f"Nullable({self.type_mapper.to_db_type(c)})" - if c.get("nullable", True) - else self.type_mapper.to_db_type(c) - ) - - return ( - f"{self.capabilities.escape_identifier(c['name'])} {type_with_nullability_modifier} {hints_str}" - .strip() - ) - def get_storage_table(self, table_name: str) -> Tuple[bool, TTableSchemaColumns]: fields = self._get_storage_table_query_columns() db_params = self.sql_client.make_qualified_table_name(table_name, escape=False).split( @@ -471,6 +479,7 @@ def get_storage_table(self, table_name: str) -> Tuple[bool, TTableSchemaColumns] return True, schema_table # Clickhouse fields are not nullable by default. + @staticmethod def _gen_not_null(v: bool) -> str: # We use the `Nullable` modifier instead of NULL / NOT NULL modifiers to cater for ALTER statement. @@ -483,6 +492,3 @@ def _from_db_type( def restore_file_load(self, file_path: str) -> LoadJob: return EmptyLoadJob.from_file_path(file_path, "completed") - - def _create_merge_followup_jobs(self, table_chain: Sequence[TTableSchema]) -> List[NewLoadJob]: - return [ClickhouseMergeJob.from_table_chain(table_chain, self.sql_client)] diff --git a/dlt/destinations/impl/clickhouse/sql_client.py b/dlt/destinations/impl/clickhouse/sql_client.py index 6641f7b752..74d0b217a0 100644 --- a/dlt/destinations/impl/clickhouse/sql_client.py +++ b/dlt/destinations/impl/clickhouse/sql_client.py @@ -57,11 +57,10 @@ def open_connection(self) -> clickhouse_driver.dbapi.connection.Connection: dsn=self.credentials.to_native_representation() ) with self._conn.cursor() as cur: - # Set session settings. There doesn't seem to be a way to set these - # without using the library's top-level, non-dbapi2 client. + # Toggle experimental settings. + # These are necessary for nested datatypes and other operations to work. cur.execute("set allow_experimental_object_type = 1") cur.execute("set allow_experimental_lightweight_delete = 1") - return self._conn @raise_open_connection_error diff --git a/dlt/sources/helpers/rest_client/paginators.py b/dlt/sources/helpers/rest_client/paginators.py index 65605b7dee..11a28c22ea 100644 --- a/dlt/sources/helpers/rest_client/paginators.py +++ b/dlt/sources/helpers/rest_client/paginators.py @@ -1,5 +1,6 @@ from abc import ABC, abstractmethod from typing import Optional +from urllib.parse import urlparse, urljoin from dlt.sources.helpers.requests import Response, Request from dlt.common import jsonpath @@ -85,6 +86,14 @@ def update_state(self, response: Response) -> None: if total is None: raise ValueError(f"Total count not found in response for {self.__class__.__name__}") + try: + total = int(total) + except ValueError: + raise ValueError( + f"Total count is not an integer in response for {self.__class__.__name__}. 
" + f"Expected an integer, got {total}" + ) + self.offset += self.limit if self.offset >= total: @@ -100,6 +109,12 @@ def update_request(self, request: Request) -> None: class BaseNextUrlPaginator(BasePaginator): def update_request(self, request: Request) -> None: + # Handle relative URLs + if self.next_reference: + parsed_url = urlparse(self.next_reference) + if not parsed_url.scheme: + self.next_reference = urljoin(request.url, self.next_reference) + request.url = self.next_reference From 092a524e0e1e003abd01d390e921cc36f4d2742f Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Thu, 4 Apr 2024 17:38:28 +0200 Subject: [PATCH 050/127] Fix 'from_db_type' #1055 Signed-off-by: Marcel Coetzee --- dlt/destinations/impl/clickhouse/__init__.py | 2 +- .../impl/clickhouse/clickhouse.py | 42 +++++++++++++++---- .../impl/clickhouse/sql_client.py | 12 +++--- .../test_clickhouse_configuration.py | 4 +- .../test_clickhouse_table_builder.py | 20 ++++----- tests/load/clickhouse/test_utls.py | 30 ++++++------- tests/load/test_job_client.py | 7 ++++ tests/load/utils.py | 4 +- 8 files changed, 77 insertions(+), 44 deletions(-) diff --git a/dlt/destinations/impl/clickhouse/__init__.py b/dlt/destinations/impl/clickhouse/__init__.py index 035e799d38..6136e0078d 100644 --- a/dlt/destinations/impl/clickhouse/__init__.py +++ b/dlt/destinations/impl/clickhouse/__init__.py @@ -9,7 +9,7 @@ def capabilities() -> DestinationCapabilitiesContext: caps = DestinationCapabilitiesContext() # Clickhouse only supports loading from staged files on s3 for now. caps.preferred_loader_file_format = "jsonl" - caps.supported_loader_file_formats = ["parquet", "jsonl", "insert_values"] + caps.supported_loader_file_formats = ["parquet", "jsonl"] caps.preferred_staging_file_format = "jsonl" caps.supported_staging_file_formats = ["parquet", "jsonl"] diff --git a/dlt/destinations/impl/clickhouse/clickhouse.py b/dlt/destinations/impl/clickhouse/clickhouse.py index 4910b9c0f6..1007d8ea45 100644 --- a/dlt/destinations/impl/clickhouse/clickhouse.py +++ b/dlt/destinations/impl/clickhouse/clickhouse.py @@ -1,4 +1,5 @@ import os +import re from copy import deepcopy from typing import ClassVar, Optional, Dict, List, Sequence, cast, Tuple from urllib.parse import urlparse @@ -72,13 +73,12 @@ class ClickhouseTypeMapper(TypeMapper): sct_to_unbound_dbt = { - "complex": "String", + "complex": "JSON", "text": "String", "double": "Float64", "bool": "Boolean", "date": "Date", "timestamp": "DateTime('UTC')", - "time": "Time('UTC')", "bigint": "Int64", "binary": "String", "wei": "Decimal", @@ -87,21 +87,19 @@ class ClickhouseTypeMapper(TypeMapper): sct_to_dbt = { "decimal": "Decimal(%i,%i)", "wei": "Decimal(%i,%i)", - "timestamp": "DateTime(%i, 'UTC')", - "time": "Time(%i ,'UTC')", + "timestamp": "DateTime(%i,'UTC')", } dbt_to_sct = { "String": "text", "Float64": "double", - "Boolean": "bool", + "Bool": "bool", "Date": "date", "DateTime": "timestamp", - "DateTime('UTC')": "timestamp", + "DateTime64": "timestamp", "Time": "timestamp", - "Time('UTC')": "timestamp", "Int64": "bigint", - "JSON": "complex", + "Object('json')": "complex", "Decimal": "decimal", } @@ -111,8 +109,33 @@ def to_db_time_type(self, precision: Optional[int], table_format: TTableFormat = def from_db_type( self, db_type: str, precision: Optional[int] = None, scale: Optional[int] = None ) -> TColumnType: + # Remove "Nullable" wrapper. + db_type = re.sub(r"^Nullable\((?P.+)\)$", r"\g", db_type) + + # Remove timezone details. 
+ if db_type == "DateTime('UTC')": + db_type = "DateTime" + if datetime_match := re.match( + r"DateTime64(?:\((?P\d+)(?:,?\s*'(?PUTC)')?\))?", db_type + ): + if datetime_match["precision"]: + precision = int(datetime_match["precision"]) + else: + precision = None + db_type = "DateTime64" + + # Extract precision and scale, parameters and remove from string. + if decimal_match := re.match( + r"Decimal\((?P\d+)\s*(?:,\s*(?P\d+))?\)", db_type + ): + precision, scale = decimal_match.groups() # type: ignore[assignment] + precision = int(precision) + scale = int(scale) if scale else 0 + db_type = "Decimal" + if db_type == "Decimal" and (precision, scale) == self.capabilities.wei_precision: return dict(data_type="wei") + return super().from_db_type(db_type, precision, scale) @@ -396,9 +419,10 @@ def _get_column_def_sql(self, c: TColumnSchema, table_format: TTableFormat = Non ) # Alter table statements only accept `Nullable` modifiers. + # JSON type isn't nullable in Clickhouse. type_with_nullability_modifier = ( f"Nullable({self.type_mapper.to_db_type(c)})" - if c.get("nullable", True) + if c.get("nullable", True) and c.get("data_type") != "complex" else self.type_mapper.to_db_type(c) ) diff --git a/dlt/destinations/impl/clickhouse/sql_client.py b/dlt/destinations/impl/clickhouse/sql_client.py index 74d0b217a0..a788ccdad0 100644 --- a/dlt/destinations/impl/clickhouse/sql_client.py +++ b/dlt/destinations/impl/clickhouse/sql_client.py @@ -56,11 +56,6 @@ def open_connection(self) -> clickhouse_driver.dbapi.connection.Connection: self._conn = clickhouse_driver.dbapi.connect( dsn=self.credentials.to_native_representation() ) - with self._conn.cursor() as cur: - # Toggle experimental settings. - # These are necessary for nested datatypes and other operations to work. - cur.execute("set allow_experimental_object_type = 1") - cur.execute("set allow_experimental_lightweight_delete = 1") return self._conn @raise_open_connection_error @@ -133,6 +128,13 @@ def execute_query( query, db_args = _convert_to_old_pyformat(query, args, OperationalError) db_args.update(kwargs) + # Prefix each query transaction with experimental settings. + # These are necessary for nested datatypes to be available and other operations to work. 
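+        # The combined string is split on ';' below, so each SET statement executes on
+        # the same cursor immediately before the actual query.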
+ query = ( + "set allow_experimental_lightweight_delete = 1;" + "set allow_experimental_object_type = 1;" + f"{query}" + ) with self._conn.cursor() as cursor: for query_line in query.split(";"): if query_line := query_line.strip(): diff --git a/tests/load/clickhouse/test_clickhouse_configuration.py b/tests/load/clickhouse/test_clickhouse_configuration.py index d6b41c0189..1b6fab2f28 100644 --- a/tests/load/clickhouse/test_clickhouse_configuration.py +++ b/tests/load/clickhouse/test_clickhouse_configuration.py @@ -17,7 +17,7 @@ from tests.common.configuration.utils import environment -def test_connection_string_with_all_params() -> None: +def test_clickhouse_connection_string_with_all_params() -> None: url = "clickhouse://user1:pass1@host1:9000/testdb?secure=0&connect_timeout=230&send_receive_timeout=1000" creds = ClickhouseCredentials() # type: ignore @@ -50,7 +50,7 @@ def test_clickhouse_configuration() -> None: @pytest.mark.usefixtures("environment") -def test_gcp_hmac_getter_accessor(environment: Any) -> None: +def test_clickhouse_gcp_hmac_getter_accessor(environment: Any) -> None: environment["DESTINATION__FILESYSTEM__CREDENTIALS__GCP_ACCESS_KEY_ID"] = "25g08jaDJacj42" environment["DESTINATION__FILESYSTEM__CREDENTIALS__GCP_SECRET_ACCESS_KEY"] = "ascvntp45uasdf" diff --git a/tests/load/clickhouse/test_clickhouse_table_builder.py b/tests/load/clickhouse/test_clickhouse_table_builder.py index 7efab56464..0d1ba2f334 100644 --- a/tests/load/clickhouse/test_clickhouse_table_builder.py +++ b/tests/load/clickhouse/test_clickhouse_table_builder.py @@ -48,7 +48,7 @@ def test_clickhouse_configuration() -> None: assert ClickhouseClientConfiguration(credentials=c).fingerprint() == digest128("host1") -def test_create_table(clickhouse_client: ClickhouseClient) -> None: +def test_clickhouse_create_table(clickhouse_client: ClickhouseClient) -> None: statements = clickhouse_client._get_table_update_sql("event_test_table", TABLE_UPDATE, False) assert len(statements) == 1 sql = statements[0] @@ -66,7 +66,7 @@ def test_create_table(clickhouse_client: ClickhouseClient) -> None: assert "`col6` Decimal(38,9)" in sql assert "`col7` String" in sql assert "`col8` Decimal(76,0)" in sql - assert "`col9` String" in sql + assert "`col9` JSON" in sql assert "`col10` Date" in sql assert "`col11` DateTime" in sql assert "`col1_null` Nullable(Int64)" in sql @@ -77,18 +77,18 @@ def test_create_table(clickhouse_client: ClickhouseClient) -> None: assert "`col6_null` Nullable(Decimal(38,9))" in sql assert "`col7_null` Nullable(String)" in sql assert "`col8_null` Nullable(Decimal(76,0))" in sql - assert "`col9_null` Nullable(String)" in sql + assert "`col9_null` JSON" in sql # JSON isn't nullable in clickhouse assert "`col10_null` Nullable(Date)" in sql assert "`col11_null` Nullable(DateTime)" in sql assert "`col1_precision` Int64" in sql - assert "`col4_precision` DateTime(3, 'UTC')" in sql + assert "`col4_precision` DateTime(3,'UTC')" in sql assert "`col5_precision` String" in sql assert "`col6_precision` Decimal(6,2)" in sql assert "`col7_precision` String" in sql assert "`col11_precision` DateTime" in sql -def test_alter_table(clickhouse_client: ClickhouseClient) -> None: +def test_clickhouse_alter_table(clickhouse_client: ClickhouseClient) -> None: statements = clickhouse_client._get_table_update_sql("event_test_table", TABLE_UPDATE, True) assert len(statements) == 1 sql = statements[0] @@ -108,7 +108,7 @@ def test_alter_table(clickhouse_client: ClickhouseClient) -> None: assert "`col6` Decimal(38,9)" in sql assert 
"`col7` String" in sql assert "`col8` Decimal(76,0)" in sql - assert "`col9` String" in sql + assert "`col9` JSON" in sql assert "`col10` Date" in sql assert "`col11` DateTime" in sql assert "`col1_null` Nullable(Int64)" in sql @@ -119,11 +119,11 @@ def test_alter_table(clickhouse_client: ClickhouseClient) -> None: assert "`col6_null` Nullable(Decimal(38,9))" in sql assert "`col7_null` Nullable(String)" in sql assert "`col8_null` Nullable(Decimal(76,0))" in sql - assert "`col9_null` Nullable(String)" in sql + assert "`col9_null` JSON" in sql assert "`col10_null` Nullable(Date)" in sql assert "`col11_null` Nullable(DateTime)" in sql assert "`col1_precision` Int64" in sql - assert "`col4_precision` DateTime(3, 'UTC')" in sql + assert "`col4_precision` DateTime(3,'UTC')" in sql assert "`col5_precision` String" in sql assert "`col6_precision` Decimal(6,2)" in sql assert "`col7_precision` String" in sql @@ -138,7 +138,7 @@ def test_alter_table(clickhouse_client: ClickhouseClient) -> None: @pytest.mark.usefixtures("empty_schema") -def test_create_table_with_primary_keys(clickhouse_client: ClickhouseClient) -> None: +def test_clickhouse_create_table_with_primary_keys(clickhouse_client: ClickhouseClient) -> None: mod_update = deepcopy(TABLE_UPDATE) mod_update[1]["primary_key"] = True @@ -154,7 +154,7 @@ def test_create_table_with_primary_keys(clickhouse_client: ClickhouseClient) -> "Only `primary_key` hint has been implemented so far, which isn't specified inline with the" " column definition." ) -def test_create_table_with_hints(client: ClickhouseClient) -> None: +def test_clickhouse_create_table_with_hints(client: ClickhouseClient) -> None: mod_update = deepcopy(TABLE_UPDATE) mod_update[0]["primary_key"] = True diff --git a/tests/load/clickhouse/test_utls.py b/tests/load/clickhouse/test_utls.py index 91cb5b7ec7..ea14c90daa 100644 --- a/tests/load/clickhouse/test_utls.py +++ b/tests/load/clickhouse/test_utls.py @@ -6,19 +6,19 @@ ) -def test_convert_s3_url_to_http() -> None: +def test_clickhouse_convert_s3_url_to_http() -> None: s3_url: str = "s3://my-bucket/path/to/file.txt" expected_http_url: str = "http://my-bucket.s3.amazonaws.com/path/to/file.txt" assert convert_storage_to_http_scheme(s3_url) == expected_http_url -def test_convert_s3_url_to_https() -> None: +def test_clickhouse_convert_s3_url_to_https() -> None: s3_url: str = "s3://my-bucket/path/to/file.txt" expected_https_url: str = "https://my-bucket.s3.amazonaws.com/path/to/file.txt" assert convert_storage_to_http_scheme(s3_url, use_https=True) == expected_https_url -def test_convert_gs_url_to_http() -> None: +def test_clickhouse_convert_gs_url_to_http() -> None: gs_url: str = "gs://my-bucket/path/to/file.txt" expected_http_url: str = "http://my-bucket.storage.googleapis.com/path/to/file.txt" assert convert_storage_to_http_scheme(gs_url) == expected_http_url @@ -27,7 +27,7 @@ def test_convert_gs_url_to_http() -> None: assert convert_storage_to_http_scheme(gcs_url) == expected_http_url -def test_convert_gs_url_to_https() -> None: +def test_clickhouse_convert_gs_url_to_https() -> None: gs_url: str = "gs://my-bucket/path/to/file.txt" expected_https_url: str = "https://my-bucket.storage.googleapis.com/path/to/file.txt" assert convert_storage_to_http_scheme(gs_url, use_https=True) == expected_https_url @@ -36,13 +36,13 @@ def test_convert_gs_url_to_https() -> None: assert convert_storage_to_http_scheme(gcs_url, use_https=True) == expected_https_url -def test_convert_s3_url_to_http_with_region() -> None: +def 
test_clickhouse_convert_s3_url_to_http_with_region() -> None: s3_url: str = "s3://my-bucket/path/to/file.txt" expected_http_url: str = "http://my-bucket.s3-us-west-2.amazonaws.com/path/to/file.txt" assert convert_storage_to_http_scheme(s3_url, region="us-west-2") == expected_http_url -def test_convert_s3_url_to_https_with_region() -> None: +def test_clickhouse_convert_s3_url_to_https_with_region() -> None: s3_url: str = "s3://my-bucket/path/to/file.txt" expected_https_url: str = "https://my-bucket.s3-us-east-1.amazonaws.com/path/to/file.txt" assert ( @@ -51,7 +51,7 @@ def test_convert_s3_url_to_https_with_region() -> None: ) -def test_convert_s3_url_to_http_with_endpoint() -> None: +def test_clickhouse_convert_s3_url_to_http_with_endpoint() -> None: s3_url: str = "s3://my-bucket/path/to/file.txt" expected_http_url: str = "http://my-bucket.s3.custom-endpoint.com/path/to/file.txt" assert ( @@ -69,7 +69,7 @@ def test_convert_s3_url_to_https_with_endpoint() -> None: ) -def test_convert_gs_url_to_http_with_endpoint() -> None: +def test_clickhouse_convert_gs_url_to_http_with_endpoint() -> None: gs_url: str = "gs://my-bucket/path/to/file.txt" expected_http_url: str = "http://my-bucket.custom-endpoint.com/path/to/file.txt" assert ( @@ -82,7 +82,7 @@ def test_convert_gs_url_to_http_with_endpoint() -> None: ) -def test_convert_gs_url_to_https_with_endpoint() -> None: +def test_clickhouse_convert_gs_url_to_https_with_endpoint() -> None: gs_url: str = "gs://my-bucket/path/to/file.txt" expected_https_url: str = "https://my-bucket.custom-endpoint.com/path/to/file.txt" assert ( @@ -97,7 +97,7 @@ def test_convert_gs_url_to_https_with_endpoint() -> None: ) -def test_render_with_credentials_jsonl() -> None: +def test_clickhouse_render_with_credentials_jsonl() -> None: url = "https://example.com/data.jsonl" access_key_id = "test_access_key" secret_access_key = "test_secret_key" @@ -111,7 +111,7 @@ def test_render_with_credentials_jsonl() -> None: ) -def test_render_with_credentials_parquet() -> None: +def test_clickhouse_render_with_credentials_parquet() -> None: url = "https://example.com/data.parquet" access_key_id = "test_access_key" secret_access_key = "test_secret_key" @@ -125,14 +125,14 @@ def test_render_with_credentials_parquet() -> None: ) -def test_render_without_credentials() -> None: +def test_clickhouse_render_without_credentials() -> None: url = "https://example.com/data.jsonl" file_format = "jsonl" expected_output = """s3('https://example.com/data.jsonl',NOSIGN,'JSONEachRow')""" assert render_object_storage_table_function(url, file_format=file_format) == expected_output # type: ignore[arg-type] -def test_render_invalid_file_format() -> None: +def test_clickhouse_render_invalid_file_format() -> None: url = "https://example.com/data.unknown" access_key_id = "test_access_key" secret_access_key = "test_secret_key" @@ -142,13 +142,13 @@ def test_render_invalid_file_format() -> None: assert "Clickhouse s3/gcs staging only supports 'parquet' and 'jsonl'." 
== str(excinfo.value) -def test_invalid_url_format() -> None: +def test_clickhouse_invalid_url_format() -> None: with pytest.raises(Exception) as exc_info: convert_storage_to_http_scheme("invalid-url") assert str(exc_info.value) == "Error converting storage URL to HTTP protocol: 'invalid-url'" -def test_render_missing_url() -> None: +def test_clickhouse_render_missing_url() -> None: with pytest.raises(TypeError) as excinfo: render_object_storage_table_function() # type: ignore assert "missing 1 required positional argument: 'url'" in str(excinfo.value) diff --git a/tests/load/test_job_client.py b/tests/load/test_job_client.py index 2e23086f81..6f2634129a 100644 --- a/tests/load/test_job_client.py +++ b/tests/load/test_job_client.py @@ -392,6 +392,13 @@ def test_get_storage_table_with_all_types(client: SqlJobClientBase) -> None: continue if client.config.destination_type == "databricks" and c["data_type"] in ("complex", "time"): continue + # Clickhouse has no active data type for binary or time type. + # TODO: JSON type is available, but not nullable in Clickhouse. + if client.config.destination_type == "clickhouse": + if c["data_type"] in ("binary", "time"): + continue + elif c["data_type"] == "complex" and c["nullable"]: + continue assert c["data_type"] == expected_c["data_type"] diff --git a/tests/load/utils.py b/tests/load/utils.py index b9ac349f42..b3ae9cfe7c 100644 --- a/tests/load/utils.py +++ b/tests/load/utils.py @@ -172,7 +172,7 @@ def destinations_configs( destination_configs += [ DestinationTestConfiguration(destination=destination) for destination in SQL_DESTINATIONS - if destination not in ("athena", "mssql", "synapse", "databricks") + if destination not in ("athena", "mssql", "synapse", "databricks", "clickhouse") ] destination_configs += [ DestinationTestConfiguration(destination="duckdb", file_format="parquet") @@ -200,7 +200,7 @@ def destinations_configs( destination_configs += [ DestinationTestConfiguration( destination="clickhouse", - file_format="parquet", + file_format="jsonl", bucket_url=AWS_BUCKET, extra_info="s3-authorization", ) From 79d9b80f94412d374de92510413e858ca82754c7 Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Thu, 4 Apr 2024 18:14:41 +0200 Subject: [PATCH 051/127] Remove unused tests #1055 Signed-off-by: Marcel Coetzee --- .../impl/clickhouse/clickhouse.py | 35 +++++---- tests/load/pipeline/test_clickhouse.py | 71 ------------------- tests/load/utils.py | 5 ++ 3 files changed, 25 insertions(+), 86 deletions(-) diff --git a/dlt/destinations/impl/clickhouse/clickhouse.py b/dlt/destinations/impl/clickhouse/clickhouse.py index 1007d8ea45..4d1734189b 100644 --- a/dlt/destinations/impl/clickhouse/clickhouse.py +++ b/dlt/destinations/impl/clickhouse/clickhouse.py @@ -166,6 +166,10 @@ def __init__( if file_extension not in ["parquet", "jsonl"]: raise ValueError("Clickhouse staging only supports 'parquet' and 'jsonl' file formats.") + print("File Path:", file_path) + print("Table Name:", table_name) + print("Bucket Path:", bucket_path) + if not bucket_path: # Local filesystem. raise NotImplementedError("Only object storage is supported.") @@ -197,26 +201,27 @@ def __init__( ) elif bucket_scheme in ("az", "abfs"): - if isinstance(staging_credentials, AzureCredentialsWithoutDefaults): - # Authenticated access. 
- account_name = staging_credentials.azure_storage_account_name - storage_account_url = f"https://{staging_credentials.azure_storage_account_name}.blob.core.windows.net" - account_key = staging_credentials.azure_storage_account_key - container_name = bucket_url.netloc - blobpath = bucket_url.path - - clickhouse_format = FILE_FORMAT_TO_TABLE_FUNCTION_MAPPING[file_extension] - - table_function = ( - f"azureBlobStorage('{storage_account_url}','{container_name}','{ blobpath }','{ account_name }','{ account_key }','{ clickhouse_format}')" - ) - - else: + if not isinstance( + staging_credentials, AzureCredentialsWithoutDefaults + ): # Unsigned access. raise NotImplementedError( "Unsigned Azure Blob Storage access from Clickhouse isn't supported as yet." ) + # Authenticated access. + account_name = staging_credentials.azure_storage_account_name + storage_account_url = f"https://{staging_credentials.azure_storage_account_name}.blob.core.windows.net" + account_key = staging_credentials.azure_storage_account_key + container_name = bucket_url.netloc + blobpath = bucket_url.path + + clickhouse_format = FILE_FORMAT_TO_TABLE_FUNCTION_MAPPING[file_extension] + + table_function = ( + f"azureBlobStorage('{storage_account_url}','{container_name}','{ blobpath }','{ account_name }','{ account_key }','{ clickhouse_format}')" + ) + with client.begin_transaction(): client.execute_sql( f"""INSERT INTO {qualified_table_name} SELECT * FROM {table_function}""" diff --git a/tests/load/pipeline/test_clickhouse.py b/tests/load/pipeline/test_clickhouse.py index a06277ef80..9d6c6ed8d7 100644 --- a/tests/load/pipeline/test_clickhouse.py +++ b/tests/load/pipeline/test_clickhouse.py @@ -81,74 +81,3 @@ def items2() -> Iterator[TDataItem]: finally: with pipeline.sql_client() as client: client.drop_dataset() - - -@pytest.mark.parametrize( - "destination_config", - destinations_configs(default_sql_configs=True, all_staging_configs=True, subset=["clickhouse"]), - ids=lambda x: x.name, -) -def test_clickhouse_destination_merge(destination_config: DestinationTestConfiguration) -> None: - pipeline = destination_config.setup_pipeline(f"clickhouse_{uniq_id()}", full_refresh=True) - - try: - - @dlt.resource(name="items") - def items() -> Iterator[TDataItem]: - yield { - "id": 1, - "name": "item", - "sub_items": [ - {"id": 101, "name": "sub item 101"}, - {"id": 101, "name": "sub item 102"}, - ], - } - - pipeline.run( - items, - loader_file_format=destination_config.file_format, - staging=destination_config.staging, - ) - - table_counts = load_table_counts( - pipeline, *[t["name"] for t in pipeline.default_schema._schema_tables.values()] - ) - assert table_counts["items"] == 1 - assert table_counts["items__sub_items"] == 2 - assert table_counts["_dlt_loads"] == 1 - - # Load again with schema evolution. 
- @dlt.resource(name="items", write_disposition="merge", primary_key="id") - def items2() -> Iterator[TDataItem]: - yield { - "id": 1, - "name": "item", - "new_field": "hello", - "sub_items": [ - { - "id": 101, - "name": "sub item 101", - "other_new_field": "hello 101", - }, - { - "id": 101, - "name": "sub item 102", - "other_new_field": "hello 102", - }, - ], - } - - pipeline.run(items2) - table_counts = load_table_counts( - pipeline, *[t["name"] for t in pipeline.default_schema._schema_tables.values()] - ) - assert table_counts["items"] == 1 - assert table_counts["items__sub_items"] == 2 - assert table_counts["_dlt_loads"] == 2 - - except Exception as e: - raise e - - finally: - with pipeline.sql_client() as client: - client.drop_dataset() diff --git a/tests/load/utils.py b/tests/load/utils.py index b3ae9cfe7c..078c26bf71 100644 --- a/tests/load/utils.py +++ b/tests/load/utils.py @@ -203,6 +203,7 @@ def destinations_configs( file_format="jsonl", bucket_url=AWS_BUCKET, extra_info="s3-authorization", + disable_compression=True, ) ] destination_configs += [ @@ -306,6 +307,7 @@ def destinations_configs( file_format="parquet", bucket_url=AZ_BUCKET, extra_info="az-authorization", + disable_compression=True, ), DestinationTestConfiguration( destination="clickhouse", @@ -313,6 +315,7 @@ def destinations_configs( file_format="parquet", bucket_url=GCS_BUCKET, extra_info="gcs-authorization", + disable_compression=True, ), DestinationTestConfiguration( destination="clickhouse", @@ -320,6 +323,7 @@ def destinations_configs( file_format="parquet", bucket_url=AWS_BUCKET, extra_info="s3-authorization", + disable_compression=True, ), DestinationTestConfiguration( destination="clickhouse", @@ -327,6 +331,7 @@ def destinations_configs( file_format="parquet", bucket_url=AZ_BUCKET, extra_info="az-authorization", + disable_compression=True, ), DestinationTestConfiguration( destination="clickhouse", From c5d8709a377cd8cefc32bc5869775f8b6fb531ce Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Thu, 4 Apr 2024 23:53:12 +0200 Subject: [PATCH 052/127] No staging test case #1055 Signed-off-by: Marcel Coetzee --- .../impl/clickhouse/clickhouse.py | 33 ++++++++++++------- tests/load/utils.py | 2 -- 2 files changed, 21 insertions(+), 14 deletions(-) diff --git a/dlt/destinations/impl/clickhouse/clickhouse.py b/dlt/destinations/impl/clickhouse/clickhouse.py index 4d1734189b..a6b9d8da78 100644 --- a/dlt/destinations/impl/clickhouse/clickhouse.py +++ b/dlt/destinations/impl/clickhouse/clickhouse.py @@ -5,6 +5,7 @@ from urllib.parse import urlparse import dlt +from dlt import config from dlt.common.configuration.specs import ( CredentialsConfiguration, AzureCredentialsWithoutDefaults, @@ -34,7 +35,7 @@ get_dedup_sort_tuple, ) from dlt.common.storages import FileStorage -from dlt.destinations.exceptions import MergeDispositionException +from dlt.destinations.exceptions import MergeDispositionException, LoadJobTerminalException from dlt.destinations.impl.clickhouse import capabilities from dlt.destinations.impl.clickhouse.clickhouse_adapter import ( TTableEngineType, @@ -163,12 +164,19 @@ def __init__( file_extension = os.path.splitext(file_name)[1][ 1: ].lower() # Remove dot (.) from file extension. + if file_extension not in ["parquet", "jsonl"]: - raise ValueError("Clickhouse staging only supports 'parquet' and 'jsonl' file formats.") + raise LoadJobTerminalException( + file_path, "Clickhouse loader Only supports parquet and jsonl files." 
+ ) - print("File Path:", file_path) - print("Table Name:", table_name) - print("Bucket Path:", bucket_path) + if not config.get("data_writer.disable_compression"): + raise LoadJobTerminalException( + file_path, + "Clickhouse loader does not support gzip compressed files. Please disable" + " compression in the data writer configuration:" + " https://dlthub.com/docs/reference/performance#disabling-and-enabling-file-compression.", + ) if not bucket_path: # Local filesystem. @@ -180,6 +188,7 @@ def __init__( file_extension = cast(SUPPORTED_FILE_FORMATS, file_extension) table_function: str + table_function = "" if bucket_scheme in ("s3", "gs", "gcs"): bucket_http_url = convert_storage_to_http_scheme(bucket_url) @@ -201,17 +210,17 @@ def __init__( ) elif bucket_scheme in ("az", "abfs"): - if not isinstance( - staging_credentials, AzureCredentialsWithoutDefaults - ): - # Unsigned access. - raise NotImplementedError( - "Unsigned Azure Blob Storage access from Clickhouse isn't supported as yet." + if not isinstance(staging_credentials, AzureCredentialsWithoutDefaults): + raise LoadJobTerminalException( + file_path, + "Unsigned Azure Blob Storage access from Clickhouse isn't supported as yet.", ) # Authenticated access. account_name = staging_credentials.azure_storage_account_name - storage_account_url = f"https://{staging_credentials.azure_storage_account_name}.blob.core.windows.net" + storage_account_url = ( + f"https://{staging_credentials.azure_storage_account_name}.blob.core.windows.net" + ) account_key = staging_credentials.azure_storage_account_key container_name = bucket_url.netloc blobpath = bucket_url.path diff --git a/tests/load/utils.py b/tests/load/utils.py index 078c26bf71..93055cbd2b 100644 --- a/tests/load/utils.py +++ b/tests/load/utils.py @@ -201,8 +201,6 @@ def destinations_configs( DestinationTestConfiguration( destination="clickhouse", file_format="jsonl", - bucket_url=AWS_BUCKET, - extra_info="s3-authorization", disable_compression=True, ) ] From 594accc2399f3dd4515cc79228a979ef982c86a1 Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Fri, 5 Apr 2024 00:45:24 +0200 Subject: [PATCH 053/127] Minor changes Signed-off-by: Marcel Coetzee --- .../impl/clickhouse/clickhouse.py | 43 ++++++++++++------- dlt/destinations/impl/clickhouse/utils.py | 2 +- 2 files changed, 28 insertions(+), 17 deletions(-) diff --git a/dlt/destinations/impl/clickhouse/clickhouse.py b/dlt/destinations/impl/clickhouse/clickhouse.py index a6b9d8da78..38975e38b4 100644 --- a/dlt/destinations/impl/clickhouse/clickhouse.py +++ b/dlt/destinations/impl/clickhouse/clickhouse.py @@ -178,17 +178,15 @@ def __init__( " https://dlthub.com/docs/reference/performance#disabling-and-enabling-file-compression.", ) - if not bucket_path: - # Local filesystem. 
- raise NotImplementedError("Only object storage is supported.") - bucket_url = urlparse(bucket_path) bucket_scheme = bucket_url.scheme file_extension = cast(SUPPORTED_FILE_FORMATS, file_extension) - table_function: str + clickhouse_format = FILE_FORMAT_TO_TABLE_FUNCTION_MAPPING[file_extension] + table_function: str table_function = "" + if bucket_scheme in ("s3", "gs", "gcs"): bucket_http_url = convert_storage_to_http_scheme(bucket_url) @@ -225,15 +223,29 @@ def __init__( container_name = bucket_url.netloc blobpath = bucket_url.path - clickhouse_format = FILE_FORMAT_TO_TABLE_FUNCTION_MAPPING[file_extension] - table_function = ( - f"azureBlobStorage('{storage_account_url}','{container_name}','{ blobpath }','{ account_name }','{ account_key }','{ clickhouse_format}')" + f"SELECT * FROM azureBlobStorage('{storage_account_url}','{container_name}','{ blobpath }','{ account_name }','{ account_key }','{ clickhouse_format }')" + ) + elif not bucket_path: + # Local filesystem. + if not file_path: + raise LoadJobTerminalException( + file_path, + "If `bucket_path` isn't provided, then you m ust specify a local file path.", + ) + print(file_path) + table_function = ( + f"FROM INFILE '{file_path}' FORMAT {clickhouse_format}" + ) + else: + raise LoadJobTerminalException( + file_path, + f"Clickhouse loader does not support '{bucket_scheme}' filesystem.", ) with client.begin_transaction(): client.execute_sql( - f"""INSERT INTO {qualified_table_name} SELECT * FROM {table_function}""" + f"""INSERT INTO {qualified_table_name} {table_function}""" ) def state(self) -> TLoadJobState: @@ -446,19 +458,18 @@ def _get_column_def_sql(self, c: TColumnSchema, table_format: TTableFormat = Non ) def start_file_load(self, table: TTableSchema, file_path: str, load_id: str) -> LoadJob: - job = super().start_file_load(table, file_path, load_id) or ClickhouseLoadJob( + return super().start_file_load( + table, file_path, load_id + ) or ClickhouseLoadJob( file_path, table["name"], self.sql_client, staging_credentials=( - self.config.staging_config.credentials if self.config.staging_config else None + self.config.staging_config.credentials + if self.config.staging_config + else None ), ) - if not job: - assert NewReferenceJob.is_reference_job( - file_path - ), "Clickhouse must use staging to load files." 
- return job def _get_table_update_sql( self, table_name: str, new_columns: Sequence[TColumnSchema], generate_alter: bool diff --git a/dlt/destinations/impl/clickhouse/utils.py b/dlt/destinations/impl/clickhouse/utils.py index 8ab67d6522..7214d64036 100644 --- a/dlt/destinations/impl/clickhouse/utils.py +++ b/dlt/destinations/impl/clickhouse/utils.py @@ -56,7 +56,7 @@ def render_object_storage_table_function( clickhouse_format = FILE_FORMAT_TO_TABLE_FUNCTION_MAPPING[file_format] template = Template( - """s3('{{ url }}'{% if access_key_id and secret_access_key %},'{{ access_key_id }}','{{ secret_access_key }}'{% else %},NOSIGN{% endif %},'{{ clickhouse_format }}')""" + """SELECT * FROM s3('{{ url }}'{% if access_key_id and secret_access_key %},'{{ access_key_id }}','{{ secret_access_key }}'{% else %},NOSIGN{% endif %},'{{ clickhouse_format }}')""" ) return template.render( From 5d8a2283bee5efcb481b6b2213045493390f9a5a Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Fri, 5 Apr 2024 01:20:12 +0200 Subject: [PATCH 054/127] Refactor Clickhouse loader Signed-off-by: Marcel Coetzee --- .../impl/clickhouse/clickhouse.py | 74 ++++++--- dlt/destinations/impl/clickhouse/utils.py | 25 +-- tests/load/clickhouse/test_utls.py | 154 ------------------ 3 files changed, 49 insertions(+), 204 deletions(-) delete mode 100644 tests/load/clickhouse/test_utls.py diff --git a/dlt/destinations/impl/clickhouse/clickhouse.py b/dlt/destinations/impl/clickhouse/clickhouse.py index 38975e38b4..b09f1e447f 100644 --- a/dlt/destinations/impl/clickhouse/clickhouse.py +++ b/dlt/destinations/impl/clickhouse/clickhouse.py @@ -4,6 +4,8 @@ from typing import ClassVar, Optional, Dict, List, Sequence, cast, Tuple from urllib.parse import urlparse +from jinja2 import Template + import dlt from dlt import config from dlt.common.configuration.specs import ( @@ -47,7 +49,6 @@ from dlt.destinations.impl.clickhouse.sql_client import ClickhouseSqlClient from dlt.destinations.impl.clickhouse.utils import ( convert_storage_to_http_scheme, - render_object_storage_table_function, FILE_FORMAT_TO_TABLE_FUNCTION_MAPPING, SUPPORTED_FILE_FORMATS, ) @@ -104,9 +105,11 @@ class ClickhouseTypeMapper(TypeMapper): "Decimal": "decimal", } + def to_db_time_type(self, precision: Optional[int], table_format: TTableFormat = None) -> str: return "DateTime" + def from_db_type( self, db_type: str, precision: Optional[int] = None, scale: Optional[int] = None ) -> TColumnType: @@ -162,27 +165,28 @@ def __init__( FileStorage.get_file_name_from_file_path(bucket_path) if bucket_path else file_name ) file_extension = os.path.splitext(file_name)[1][ - 1: - ].lower() # Remove dot (.) from file extension. + 1: + ].lower() # Remove dot (.) from file extension. if file_extension not in ["parquet", "jsonl"]: raise LoadJobTerminalException( file_path, "Clickhouse loader Only supports parquet and jsonl files." ) - if not config.get("data_writer.disable_compression"): - raise LoadJobTerminalException( - file_path, - "Clickhouse loader does not support gzip compressed files. Please disable" - " compression in the data writer configuration:" - " https://dlthub.com/docs/reference/performance#disabling-and-enabling-file-compression.", - ) + # if not config.get("data_writer.disable_compression"): + # raise LoadJobTerminalException( + # file_path, + # "Clickhouse loader does not support gzip compressed files. 
Please disable" + # " compression in the data writer configuration:" + # " https://dlthub.com/docs/reference/performance#disabling-and-enabling-file-compression.", + # ) bucket_url = urlparse(bucket_path) bucket_scheme = bucket_url.scheme file_extension = cast(SUPPORTED_FILE_FORMATS, file_extension) clickhouse_format = FILE_FORMAT_TO_TABLE_FUNCTION_MAPPING[file_extension] + compression = 'none' if config.get("data_writer.disable_compression") else 'gz' table_function: str table_function = "" @@ -203,10 +207,22 @@ def __init__( access_key_id = None secret_access_key = None - table_function = render_object_storage_table_function( - bucket_http_url, access_key_id, secret_access_key, file_format=file_extension + clickhouse_format = FILE_FORMAT_TO_TABLE_FUNCTION_MAPPING[file_extension] + + template = Template( + """ + SELECT * FROM s3('{{ url }}'{% if access_key_id and secret_access_key %}, + '{{ access_key_id }}','{{ secret_access_key }}'{% else %},NOSIGN{% endif %},'{{ clickhouse_format }}') + """ ) + table_function = template.render( + url=bucket_http_url, + access_key_id=access_key_id, + secret_access_key=secret_access_key, + clickhouse_format=clickhouse_format, + ).strip() + elif bucket_scheme in ("az", "abfs"): if not isinstance(staging_credentials, AzureCredentialsWithoutDefaults): raise LoadJobTerminalException( @@ -224,33 +240,33 @@ def __init__( blobpath = bucket_url.path table_function = ( - f"SELECT * FROM azureBlobStorage('{storage_account_url}','{container_name}','{ blobpath }','{ account_name }','{ account_key }','{ clickhouse_format }')" + "SELECT * FROM" + f" azureBlobStorage('{storage_account_url}','{container_name}','{blobpath}','{account_name}','{account_key}','{clickhouse_format}')" ) elif not bucket_path: # Local filesystem. if not file_path: raise LoadJobTerminalException( file_path, - "If `bucket_path` isn't provided, then you m ust specify a local file path.", + "If `bucket_path` isn't provided, then you must specify a local file path.", ) print(file_path) - table_function = ( - f"FROM INFILE '{file_path}' FORMAT {clickhouse_format}" - ) + table_function = f"FROM INFILE '{file_path}' FORMAT {clickhouse_format}" else: raise LoadJobTerminalException( file_path, f"Clickhouse loader does not support '{bucket_scheme}' filesystem.", ) + print(table_function) with client.begin_transaction(): - client.execute_sql( - f"""INSERT INTO {qualified_table_name} {table_function}""" - ) + client.execute_sql(f"""INSERT INTO {qualified_table_name} {table_function}""") + def state(self) -> TLoadJobState: return "completed" + def exception(self) -> str: raise NotImplementedError() @@ -264,6 +280,7 @@ def _to_temp_table(cls, select_sql: str, temp_table_name: str) -> str: # return f"CREATE TABLE {temp_table_name} ENGINE = Memory AS {select_sql};" return f"CREATE TABLE {temp_table_name} ENGINE = Memory AS {select_sql};" + @classmethod def gen_merge_sql( cls, table_chain: Sequence[TTableSchema], sql_client: ClickhouseSqlClient # type: ignore[override] @@ -417,6 +434,7 @@ def gen_merge_sql( class ClickhouseClient(InsertValuesJobClient, SupportsStagingDestination): capabilities: ClassVar[DestinationCapabilitiesContext] = capabilities() + def __init__( self, schema: Schema, @@ -430,9 +448,11 @@ def __init__( self.active_hints = deepcopy(HINT_TO_CLICKHOUSE_ATTR) self.type_mapper = ClickhouseTypeMapper(self.capabilities) + def _create_merge_followup_jobs(self, table_chain: Sequence[TTableSchema]) -> List[NewLoadJob]: return [ClickhouseMergeJob.from_table_chain(table_chain, self.sql_client)] + def 
_get_column_def_sql(self, c: TColumnSchema, table_format: TTableFormat = None) -> str: # Build column definition. # The primary key and sort order definition is defined outside column specification. @@ -457,20 +477,18 @@ def _get_column_def_sql(self, c: TColumnSchema, table_format: TTableFormat = Non .strip() ) + def start_file_load(self, table: TTableSchema, file_path: str, load_id: str) -> LoadJob: - return super().start_file_load( - table, file_path, load_id - ) or ClickhouseLoadJob( + return super().start_file_load(table, file_path, load_id) or ClickhouseLoadJob( file_path, table["name"], self.sql_client, staging_credentials=( - self.config.staging_config.credentials - if self.config.staging_config - else None + self.config.staging_config.credentials if self.config.staging_config else None ), ) + def _get_table_update_sql( self, table_name: str, new_columns: Sequence[TColumnSchema], generate_alter: bool ) -> List[str]: @@ -499,6 +517,7 @@ def _get_table_update_sql( return sql + def get_storage_table(self, table_name: str) -> Tuple[bool, TTableSchemaColumns]: fields = self._get_storage_table_query_columns() db_params = self.sql_client.make_qualified_table_name(table_name, escape=False).split( @@ -527,6 +546,7 @@ def get_storage_table(self, table_name: str) -> Tuple[bool, TTableSchemaColumns] schema_table[c[0]] = schema_c # type: ignore return True, schema_table + # Clickhouse fields are not nullable by default. @staticmethod @@ -534,10 +554,12 @@ def _gen_not_null(v: bool) -> str: # We use the `Nullable` modifier instead of NULL / NOT NULL modifiers to cater for ALTER statement. pass + def _from_db_type( self, ch_t: str, precision: Optional[int], scale: Optional[int] ) -> TColumnType: return self.type_mapper.from_db_type(ch_t, precision, scale) + def restore_file_load(self, file_path: str) -> LoadJob: return EmptyLoadJob.from_file_path(file_path, "completed") diff --git a/dlt/destinations/impl/clickhouse/utils.py b/dlt/destinations/impl/clickhouse/utils.py index 7214d64036..9f9de85133 100644 --- a/dlt/destinations/impl/clickhouse/utils.py +++ b/dlt/destinations/impl/clickhouse/utils.py @@ -1,8 +1,6 @@ -from typing import Union, Optional, Literal, Dict +from typing import Union, Literal, Dict from urllib.parse import urlparse, ParseResult -from jinja2 import Template - SUPPORTED_FILE_FORMATS = Literal["jsonl", "parquet"] FILE_FORMAT_TO_TABLE_FUNCTION_MAPPING: Dict[SUPPORTED_FILE_FORMATS, str] = { @@ -44,24 +42,3 @@ def convert_storage_to_http_scheme( raise Exception(f"Error converting storage URL to HTTP protocol: '{url}'") from e -def render_object_storage_table_function( - url: str, - access_key_id: Optional[str] = None, - secret_access_key: Optional[str] = None, - file_format: SUPPORTED_FILE_FORMATS = "jsonl", -) -> str: - if file_format not in ["parquet", "jsonl"]: - raise ValueError("Clickhouse s3/gcs staging only supports 'parquet' and 'jsonl'.") - - clickhouse_format = FILE_FORMAT_TO_TABLE_FUNCTION_MAPPING[file_format] - - template = Template( - """SELECT * FROM s3('{{ url }}'{% if access_key_id and secret_access_key %},'{{ access_key_id }}','{{ secret_access_key }}'{% else %},NOSIGN{% endif %},'{{ clickhouse_format }}')""" - ) - - return template.render( - url=url, - access_key_id=access_key_id, - secret_access_key=secret_access_key, - clickhouse_format=clickhouse_format, - ).strip() diff --git a/tests/load/clickhouse/test_utls.py b/tests/load/clickhouse/test_utls.py deleted file mode 100644 index ea14c90daa..0000000000 --- a/tests/load/clickhouse/test_utls.py +++ /dev/null @@ 
-1,154 +0,0 @@ -import pytest - -from dlt.destinations.impl.clickhouse.utils import ( - convert_storage_to_http_scheme, - render_object_storage_table_function, -) - - -def test_clickhouse_convert_s3_url_to_http() -> None: - s3_url: str = "s3://my-bucket/path/to/file.txt" - expected_http_url: str = "http://my-bucket.s3.amazonaws.com/path/to/file.txt" - assert convert_storage_to_http_scheme(s3_url) == expected_http_url - - -def test_clickhouse_convert_s3_url_to_https() -> None: - s3_url: str = "s3://my-bucket/path/to/file.txt" - expected_https_url: str = "https://my-bucket.s3.amazonaws.com/path/to/file.txt" - assert convert_storage_to_http_scheme(s3_url, use_https=True) == expected_https_url - - -def test_clickhouse_convert_gs_url_to_http() -> None: - gs_url: str = "gs://my-bucket/path/to/file.txt" - expected_http_url: str = "http://my-bucket.storage.googleapis.com/path/to/file.txt" - assert convert_storage_to_http_scheme(gs_url) == expected_http_url - gcs_url = "gcs://my-bucket/path/to/file.txt" - expected_http_url = "http://my-bucket.storage.googleapis.com/path/to/file.txt" - assert convert_storage_to_http_scheme(gcs_url) == expected_http_url - - -def test_clickhouse_convert_gs_url_to_https() -> None: - gs_url: str = "gs://my-bucket/path/to/file.txt" - expected_https_url: str = "https://my-bucket.storage.googleapis.com/path/to/file.txt" - assert convert_storage_to_http_scheme(gs_url, use_https=True) == expected_https_url - gcs_url = "gcs://my-bucket/path/to/file.txt" - expected_https_url = "https://my-bucket.storage.googleapis.com/path/to/file.txt" - assert convert_storage_to_http_scheme(gcs_url, use_https=True) == expected_https_url - - -def test_clickhouse_convert_s3_url_to_http_with_region() -> None: - s3_url: str = "s3://my-bucket/path/to/file.txt" - expected_http_url: str = "http://my-bucket.s3-us-west-2.amazonaws.com/path/to/file.txt" - assert convert_storage_to_http_scheme(s3_url, region="us-west-2") == expected_http_url - - -def test_clickhouse_convert_s3_url_to_https_with_region() -> None: - s3_url: str = "s3://my-bucket/path/to/file.txt" - expected_https_url: str = "https://my-bucket.s3-us-east-1.amazonaws.com/path/to/file.txt" - assert ( - convert_storage_to_http_scheme(s3_url, use_https=True, region="us-east-1") - == expected_https_url - ) - - -def test_clickhouse_convert_s3_url_to_http_with_endpoint() -> None: - s3_url: str = "s3://my-bucket/path/to/file.txt" - expected_http_url: str = "http://my-bucket.s3.custom-endpoint.com/path/to/file.txt" - assert ( - convert_storage_to_http_scheme(s3_url, endpoint="s3.custom-endpoint.com") - == expected_http_url - ) - - -def test_convert_s3_url_to_https_with_endpoint() -> None: - s3_url: str = "s3://my-bucket/path/to/file.txt" - expected_https_url: str = "https://my-bucket.s3.custom-endpoint.com/path/to/file.txt" - assert ( - convert_storage_to_http_scheme(s3_url, use_https=True, endpoint="s3.custom-endpoint.com") - == expected_https_url - ) - - -def test_clickhouse_convert_gs_url_to_http_with_endpoint() -> None: - gs_url: str = "gs://my-bucket/path/to/file.txt" - expected_http_url: str = "http://my-bucket.custom-endpoint.com/path/to/file.txt" - assert ( - convert_storage_to_http_scheme(gs_url, endpoint="custom-endpoint.com") == expected_http_url - ) - gcs_url = "gcs://my-bucket/path/to/file.txt" - expected_http_url = "http://my-bucket.custom-endpoint.com/path/to/file.txt" - assert ( - convert_storage_to_http_scheme(gcs_url, endpoint="custom-endpoint.com") == expected_http_url - ) - - -def 
test_clickhouse_convert_gs_url_to_https_with_endpoint() -> None: - gs_url: str = "gs://my-bucket/path/to/file.txt" - expected_https_url: str = "https://my-bucket.custom-endpoint.com/path/to/file.txt" - assert ( - convert_storage_to_http_scheme(gs_url, use_https=True, endpoint="custom-endpoint.com") - == expected_https_url - ) - gcs_url = "gcs://my-bucket/path/to/file.txt" - expected_https_url = "https://my-bucket.custom-endpoint.com/path/to/file.txt" - assert ( - convert_storage_to_http_scheme(gcs_url, use_https=True, endpoint="custom-endpoint.com") - == expected_https_url - ) - - -def test_clickhouse_render_with_credentials_jsonl() -> None: - url = "https://example.com/data.jsonl" - access_key_id = "test_access_key" - secret_access_key = "test_secret_key" - file_format = "jsonl" - expected_output = ( - """s3('https://example.com/data.jsonl','test_access_key','test_secret_key','JSONEachRow')""" - ) - assert ( - render_object_storage_table_function(url, access_key_id, secret_access_key, file_format) # type: ignore[arg-type] - == expected_output - ) - - -def test_clickhouse_render_with_credentials_parquet() -> None: - url = "https://example.com/data.parquet" - access_key_id = "test_access_key" - secret_access_key = "test_secret_key" - file_format = "parquet" - expected_output = ( - """s3('https://example.com/data.parquet','test_access_key','test_secret_key','Parquet')""" - ) - assert ( - render_object_storage_table_function(url, access_key_id, secret_access_key, file_format) # type: ignore[arg-type] - == expected_output - ) - - -def test_clickhouse_render_without_credentials() -> None: - url = "https://example.com/data.jsonl" - file_format = "jsonl" - expected_output = """s3('https://example.com/data.jsonl',NOSIGN,'JSONEachRow')""" - assert render_object_storage_table_function(url, file_format=file_format) == expected_output # type: ignore[arg-type] - - -def test_clickhouse_render_invalid_file_format() -> None: - url = "https://example.com/data.unknown" - access_key_id = "test_access_key" - secret_access_key = "test_secret_key" - file_format = "unknown" - with pytest.raises(ValueError) as excinfo: - render_object_storage_table_function(url, access_key_id, secret_access_key, file_format) # type: ignore[arg-type] - assert "Clickhouse s3/gcs staging only supports 'parquet' and 'jsonl'." 
== str(excinfo.value) - - -def test_clickhouse_invalid_url_format() -> None: - with pytest.raises(Exception) as exc_info: - convert_storage_to_http_scheme("invalid-url") - assert str(exc_info.value) == "Error converting storage URL to HTTP protocol: 'invalid-url'" - - -def test_clickhouse_render_missing_url() -> None: - with pytest.raises(TypeError) as excinfo: - render_object_storage_table_function() # type: ignore - assert "missing 1 required positional argument: 'url'" in str(excinfo.value) From d454d8f36274ab2251591c175f3f836f4d28f19b Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Fri, 5 Apr 2024 01:23:45 +0200 Subject: [PATCH 055/127] WIP Signed-off-by: Marcel Coetzee --- .../impl/clickhouse/clickhouse.py | 26 ++++--------------- dlt/destinations/impl/clickhouse/utils.py | 2 -- 2 files changed, 5 insertions(+), 23 deletions(-) diff --git a/dlt/destinations/impl/clickhouse/clickhouse.py b/dlt/destinations/impl/clickhouse/clickhouse.py index b09f1e447f..3e75025be6 100644 --- a/dlt/destinations/impl/clickhouse/clickhouse.py +++ b/dlt/destinations/impl/clickhouse/clickhouse.py @@ -105,11 +105,9 @@ class ClickhouseTypeMapper(TypeMapper): "Decimal": "decimal", } - def to_db_time_type(self, precision: Optional[int], table_format: TTableFormat = None) -> str: return "DateTime" - def from_db_type( self, db_type: str, precision: Optional[int] = None, scale: Optional[int] = None ) -> TColumnType: @@ -165,8 +163,8 @@ def __init__( FileStorage.get_file_name_from_file_path(bucket_path) if bucket_path else file_name ) file_extension = os.path.splitext(file_name)[1][ - 1: - ].lower() # Remove dot (.) from file extension. + 1: + ].lower() # Remove dot (.) from file extension. if file_extension not in ["parquet", "jsonl"]: raise LoadJobTerminalException( @@ -186,7 +184,7 @@ def __init__( file_extension = cast(SUPPORTED_FILE_FORMATS, file_extension) clickhouse_format = FILE_FORMAT_TO_TABLE_FUNCTION_MAPPING[file_extension] - compression = 'none' if config.get("data_writer.disable_compression") else 'gz' + # compression = "none" if config.get("data_writer.disable_compression") else "gz" table_function: str table_function = "" @@ -209,12 +207,10 @@ def __init__( clickhouse_format = FILE_FORMAT_TO_TABLE_FUNCTION_MAPPING[file_extension] - template = Template( - """ + template = Template(""" SELECT * FROM s3('{{ url }}'{% if access_key_id and secret_access_key %}, '{{ access_key_id }}','{{ secret_access_key }}'{% else %},NOSIGN{% endif %},'{{ clickhouse_format }}') - """ - ) + """) table_function = template.render( url=bucket_http_url, @@ -262,11 +258,9 @@ def __init__( with client.begin_transaction(): client.execute_sql(f"""INSERT INTO {qualified_table_name} {table_function}""") - def state(self) -> TLoadJobState: return "completed" - def exception(self) -> str: raise NotImplementedError() @@ -280,7 +274,6 @@ def _to_temp_table(cls, select_sql: str, temp_table_name: str) -> str: # return f"CREATE TABLE {temp_table_name} ENGINE = Memory AS {select_sql};" return f"CREATE TABLE {temp_table_name} ENGINE = Memory AS {select_sql};" - @classmethod def gen_merge_sql( cls, table_chain: Sequence[TTableSchema], sql_client: ClickhouseSqlClient # type: ignore[override] @@ -434,7 +427,6 @@ def gen_merge_sql( class ClickhouseClient(InsertValuesJobClient, SupportsStagingDestination): capabilities: ClassVar[DestinationCapabilitiesContext] = capabilities() - def __init__( self, schema: Schema, @@ -448,11 +440,9 @@ def __init__( self.active_hints = deepcopy(HINT_TO_CLICKHOUSE_ATTR) self.type_mapper = 
ClickhouseTypeMapper(self.capabilities) - def _create_merge_followup_jobs(self, table_chain: Sequence[TTableSchema]) -> List[NewLoadJob]: return [ClickhouseMergeJob.from_table_chain(table_chain, self.sql_client)] - def _get_column_def_sql(self, c: TColumnSchema, table_format: TTableFormat = None) -> str: # Build column definition. # The primary key and sort order definition is defined outside column specification. @@ -477,7 +467,6 @@ def _get_column_def_sql(self, c: TColumnSchema, table_format: TTableFormat = Non .strip() ) - def start_file_load(self, table: TTableSchema, file_path: str, load_id: str) -> LoadJob: return super().start_file_load(table, file_path, load_id) or ClickhouseLoadJob( file_path, @@ -488,7 +477,6 @@ def start_file_load(self, table: TTableSchema, file_path: str, load_id: str) -> ), ) - def _get_table_update_sql( self, table_name: str, new_columns: Sequence[TColumnSchema], generate_alter: bool ) -> List[str]: @@ -517,7 +505,6 @@ def _get_table_update_sql( return sql - def get_storage_table(self, table_name: str) -> Tuple[bool, TTableSchemaColumns]: fields = self._get_storage_table_query_columns() db_params = self.sql_client.make_qualified_table_name(table_name, escape=False).split( @@ -546,7 +533,6 @@ def get_storage_table(self, table_name: str) -> Tuple[bool, TTableSchemaColumns] schema_table[c[0]] = schema_c # type: ignore return True, schema_table - # Clickhouse fields are not nullable by default. @staticmethod @@ -554,12 +540,10 @@ def _gen_not_null(v: bool) -> str: # We use the `Nullable` modifier instead of NULL / NOT NULL modifiers to cater for ALTER statement. pass - def _from_db_type( self, ch_t: str, precision: Optional[int], scale: Optional[int] ) -> TColumnType: return self.type_mapper.from_db_type(ch_t, precision, scale) - def restore_file_load(self, file_path: str) -> LoadJob: return EmptyLoadJob.from_file_path(file_path, "completed") diff --git a/dlt/destinations/impl/clickhouse/utils.py b/dlt/destinations/impl/clickhouse/utils.py index 9f9de85133..b0b06909f9 100644 --- a/dlt/destinations/impl/clickhouse/utils.py +++ b/dlt/destinations/impl/clickhouse/utils.py @@ -40,5 +40,3 @@ def convert_storage_to_http_scheme( return f"{protocol}://{bucket_name}.{domain}/{object_key}" except Exception as e: raise Exception(f"Error converting storage URL to HTTP protocol: '{url}'") from e - - From fdd052ad44e754288cb97f984ccc63b8dd1c6b97 Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Fri, 5 Apr 2024 14:59:57 +0200 Subject: [PATCH 056/127] Remove from standard sql tests Signed-off-by: Marcel Coetzee --- dlt/destinations/impl/clickhouse/__init__.py | 4 ++-- dlt/destinations/impl/clickhouse/clickhouse.py | 13 +++++-------- tests/load/utils.py | 1 - 3 files changed, 7 insertions(+), 11 deletions(-) diff --git a/dlt/destinations/impl/clickhouse/__init__.py b/dlt/destinations/impl/clickhouse/__init__.py index 6136e0078d..1ade9649cd 100644 --- a/dlt/destinations/impl/clickhouse/__init__.py +++ b/dlt/destinations/impl/clickhouse/__init__.py @@ -8,8 +8,8 @@ def capabilities() -> DestinationCapabilitiesContext: caps = DestinationCapabilitiesContext() # Clickhouse only supports loading from staged files on s3 for now. 
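As a quick illustration of the s3() table-function template used in the loader hunks above, the snippet below renders it with hypothetical values; with credentials it emits the signed form, otherwise it falls back to NOSIGN. This is a sketch, not part of the patch:

from jinja2 import Template

# Same template shape as in the loader; the URL and keys here are hypothetical.
template = Template(
    "SELECT * FROM s3('{{ url }}'"
    "{% if access_key_id and secret_access_key %},'{{ access_key_id }}','{{ secret_access_key }}'"
    "{% else %},NOSIGN{% endif %},'{{ clickhouse_format }}')"
)
# Public bucket, no credentials -> NOSIGN:
print(template.render(url="https://my-bucket.s3.amazonaws.com/items.parquet", clickhouse_format="Parquet"))
# Authenticated access:
print(template.render(url="https://my-bucket.s3.amazonaws.com/items.parquet",
                      access_key_id="<key>", secret_access_key="<secret>", clickhouse_format="Parquet"))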
- caps.preferred_loader_file_format = "jsonl" - caps.supported_loader_file_formats = ["parquet", "jsonl"] + caps.preferred_loader_file_format = "insert_values" + caps.supported_loader_file_formats = ["insert_values"] caps.preferred_staging_file_format = "jsonl" caps.supported_staging_file_formats = ["parquet", "jsonl"] diff --git a/dlt/destinations/impl/clickhouse/clickhouse.py b/dlt/destinations/impl/clickhouse/clickhouse.py index 3e75025be6..3b44168305 100644 --- a/dlt/destinations/impl/clickhouse/clickhouse.py +++ b/dlt/destinations/impl/clickhouse/clickhouse.py @@ -7,7 +7,6 @@ from jinja2 import Template import dlt -from dlt import config from dlt.common.configuration.specs import ( CredentialsConfiguration, AzureCredentialsWithoutDefaults, @@ -241,13 +240,11 @@ def __init__( ) elif not bucket_path: # Local filesystem. - if not file_path: - raise LoadJobTerminalException( - file_path, - "If `bucket_path` isn't provided, then you must specify a local file path.", - ) - print(file_path) - table_function = f"FROM INFILE '{file_path}' FORMAT {clickhouse_format}" + raise LoadJobTerminalException( + file_path, + "Cannot load from local file. Clickhouse does not support loading from local files." + " Configure staging with an s3, gcs or azure storage bucket.", + ) else: raise LoadJobTerminalException( file_path, diff --git a/tests/load/utils.py b/tests/load/utils.py index 93055cbd2b..2c7d89f822 100644 --- a/tests/load/utils.py +++ b/tests/load/utils.py @@ -200,7 +200,6 @@ def destinations_configs( destination_configs += [ DestinationTestConfiguration( destination="clickhouse", - file_format="jsonl", disable_compression=True, ) ] From 939bd35274756f3ded5f7e9251772ace9340ed30 Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Fri, 5 Apr 2024 16:01:48 +0200 Subject: [PATCH 057/127] Remove unnecessary compression #1055 Signed-off-by: Marcel Coetzee --- tests/load/utils.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/load/utils.py b/tests/load/utils.py index 2c7d89f822..ba84327687 100644 --- a/tests/load/utils.py +++ b/tests/load/utils.py @@ -200,7 +200,6 @@ def destinations_configs( destination_configs += [ DestinationTestConfiguration( destination="clickhouse", - disable_compression=True, ) ] destination_configs += [ From b554693a8c7c8e771d5ae1fbb258d5a01121de5f Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Fri, 5 Apr 2024 19:57:06 +0200 Subject: [PATCH 058/127] Dataset prefix and dataset-table seperator #1055 Signed-off-by: Marcel Coetzee --- dlt/destinations/impl/clickhouse/__init__.py | 4 +- .../impl/clickhouse/clickhouse.py | 34 ++-- .../impl/clickhouse/sql_client.py | 43 ++--- poetry.lock | 154 +++++++++++++++++- pyproject.toml | 3 +- tests/load/utils.py | 2 + 6 files changed, 206 insertions(+), 34 deletions(-) diff --git a/dlt/destinations/impl/clickhouse/__init__.py b/dlt/destinations/impl/clickhouse/__init__.py index 1ade9649cd..6136e0078d 100644 --- a/dlt/destinations/impl/clickhouse/__init__.py +++ b/dlt/destinations/impl/clickhouse/__init__.py @@ -8,8 +8,8 @@ def capabilities() -> DestinationCapabilitiesContext: caps = DestinationCapabilitiesContext() # Clickhouse only supports loading from staged files on s3 for now. 
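ClickHouse has no schema level between database and table, so the sql_client changes later in this patch emulate dlt datasets by joining the dataset and table names with a configurable separator; a rough illustration, assuming the "___" value used elsewhere in this series (hypothetical names, not part of the patch):

# Sketch only: the separator is read from
# destination.clickhouse.credentials.dataset_table_separator.
database_name, dataset_name, separator, table_name = "dlt_db", "event_data", "___", "items"
print(f"{database_name}.{dataset_name}{separator}{table_name}")  # dlt_db.event_data___items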
- caps.preferred_loader_file_format = "insert_values" - caps.supported_loader_file_formats = ["insert_values"] + caps.preferred_loader_file_format = "jsonl" + caps.supported_loader_file_formats = ["parquet", "jsonl"] caps.preferred_staging_file_format = "jsonl" caps.supported_staging_file_formats = ["parquet", "jsonl"] diff --git a/dlt/destinations/impl/clickhouse/clickhouse.py b/dlt/destinations/impl/clickhouse/clickhouse.py index 3b44168305..592200378f 100644 --- a/dlt/destinations/impl/clickhouse/clickhouse.py +++ b/dlt/destinations/impl/clickhouse/clickhouse.py @@ -4,6 +4,8 @@ from typing import ClassVar, Optional, Dict, List, Sequence, cast, Tuple from urllib.parse import urlparse +import clickhouse_connect +from clickhouse_connect.driver.tools import insert_file from jinja2 import Template import dlt @@ -182,11 +184,11 @@ def __init__( bucket_scheme = bucket_url.scheme file_extension = cast(SUPPORTED_FILE_FORMATS, file_extension) - clickhouse_format = FILE_FORMAT_TO_TABLE_FUNCTION_MAPPING[file_extension] + clickhouse_format: str = FILE_FORMAT_TO_TABLE_FUNCTION_MAPPING[file_extension] # compression = "none" if config.get("data_writer.disable_compression") else "gz" - table_function: str - table_function = "" + table_function: str = "" + statement: str = "" if bucket_scheme in ("s3", "gs", "gcs"): bucket_http_url = convert_storage_to_http_scheme(bucket_url) @@ -217,6 +219,7 @@ def __init__( secret_access_key=secret_access_key, clickhouse_format=clickhouse_format, ).strip() + statement = f"INSERT INTO {qualified_table_name} {table_function}" elif bucket_scheme in ("az", "abfs"): if not isinstance(staging_credentials, AzureCredentialsWithoutDefaults): @@ -238,22 +241,33 @@ def __init__( "SELECT * FROM" f" azureBlobStorage('{storage_account_url}','{container_name}','{blobpath}','{account_name}','{account_key}','{clickhouse_format}')" ) + statement = f"INSERT INTO {qualified_table_name} {table_function}" elif not bucket_path: # Local filesystem. - raise LoadJobTerminalException( - file_path, - "Cannot load from local file. Clickhouse does not support loading from local files." 
- " Configure staging with an s3, gcs or azure storage bucket.", - ) + with clickhouse_connect.get_client( + host=client.credentials.host, + port=client.credentials.port, + database=client.credentials.database, + user_name=client.credentials.username, + password=client.credentials.password, + secure=bool(client.credentials.secure), + ) as clickhouse_connect_client: + insert_file( + clickhouse_connect_client, + qualified_table_name, + file_path, + fmt=clickhouse_format, + database=client.database_name, + ) + statement = "" else: raise LoadJobTerminalException( file_path, f"Clickhouse loader does not support '{bucket_scheme}' filesystem.", ) - print(table_function) with client.begin_transaction(): - client.execute_sql(f"""INSERT INTO {qualified_table_name} {table_function}""") + client.execute_sql(statement) def state(self) -> TLoadJobState: return "completed" diff --git a/dlt/destinations/impl/clickhouse/sql_client.py b/dlt/destinations/impl/clickhouse/sql_client.py index a788ccdad0..e52c0e6207 100644 --- a/dlt/destinations/impl/clickhouse/sql_client.py +++ b/dlt/destinations/impl/clickhouse/sql_client.py @@ -13,6 +13,7 @@ from clickhouse_driver.dbapi import OperationalError # type: ignore[import-untyped] from clickhouse_driver.dbapi.extras import DictCursor # type: ignore[import-untyped] +import dlt from dlt.common.destination import DestinationCapabilitiesContext from dlt.destinations.exceptions import ( DatabaseUndefinedRelation, @@ -34,6 +35,10 @@ TRANSACTIONS_UNSUPPORTED_WARNING_MESSAGE = ( "Clickhouse does not support transactions! Each statement is auto-committed separately." ) +DATASET_PREFIX = dlt.config["destination.clickhouse.credentials.dataset_prefix"] or "__" +DATASET_TABLE_SEPARATOR = ( + dlt.config["destination.clickhouse.credentials.dataset_table_separator"] or "___" +) class ClickhouseDBApiCursorImpl(DBApiCursorImpl): @@ -146,28 +151,28 @@ def execute_query( yield ClickhouseDBApiCursorImpl(cursor) # type: ignore[abstract] def fully_qualified_dataset_name(self, escape: bool = True) -> str: - database_name = ( - self.capabilities.escape_identifier(self.database_name) - if escape - else self.database_name - ) - dataset_name = ( - self.capabilities.escape_identifier(self.dataset_name) if escape else self.dataset_name - ) + if escape: + database_name = self.capabilities.escape_identifier(self.database_name) + dataset_name = self.capabilities.escape_identifier( + f"{DATASET_PREFIX}{self.dataset_name}" + ) + else: + database_name = self.database_name + dataset_name = f"{DATASET_PREFIX}{self.dataset_name}" return f"{database_name}.{dataset_name}" def make_qualified_table_name(self, table_name: str, escape: bool = True) -> str: - database_name = ( - self.capabilities.escape_identifier(self.database_name) - if escape - else self.database_name - ) - dataset_table_name = ( - self.capabilities.escape_identifier(f"{self.dataset_name}_{table_name}") - if escape - else f"{self.dataset_name}_{table_name}" - ) - return f"{database_name}.{dataset_table_name}" + if escape: + database_name = self.capabilities.escape_identifier(self.database_name) + dataset_and_table = self.capabilities.escape_identifier( + f"{DATASET_PREFIX}{self.dataset_name}{DATASET_TABLE_SEPARATOR}{table_name}" + ) + else: + database_name = self.database_name + dataset_and_table = ( + f"{DATASET_PREFIX}{self.dataset_name}{DATASET_TABLE_SEPARATOR}{table_name}" + ) + return f"{database_name}.{dataset_and_table}" @classmethod def _make_database_exception(cls, ex: Exception) -> Exception: # type: ignore[return] diff --git 
a/poetry.lock b/poetry.lock index b38984b03c..64a93acca9 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1670,6 +1670,95 @@ files = [ click = ">=4.0" PyYAML = ">=3.11" +[[package]] +name = "clickhouse-connect" +version = "0.7.7" +description = "ClickHouse Database Core Driver for Python, Pandas, and Superset" +optional = true +python-versions = "~=3.8" +files = [ + {file = "clickhouse-connect-0.7.7.tar.gz", hash = "sha256:c9ff4377ceefaa47c382a7372da4fdbf807f931000ad9b5bab850a60166b0d1c"}, + {file = "clickhouse_connect-0.7.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:233037de24cf1d9ed0bed0a507b970845e7f181da708198a0a033b05ae408969"}, + {file = "clickhouse_connect-0.7.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6ea4deac8d35323d328155497ba51d80b54a20c2dd2d93b5fd6a1d8f92dca630"}, + {file = "clickhouse_connect-0.7.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e24036258ed72e3440be3b7302bbd110157bcdddc1a92e0f6829bba5eb10b7a"}, + {file = "clickhouse_connect-0.7.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80826f10017c07f00cac111fcdf0ac8ec4075d6ae718047cac6440ce3df9816e"}, + {file = "clickhouse_connect-0.7.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5d72fec2df3996edfd0a7b2d9c51a99db8771f122537bd1ef963d3d7ccb315c"}, + {file = "clickhouse_connect-0.7.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:542c9fa4c92de8bcd77b8503b7ef778f32f307823eba37a3fb955e0b82fc8e7e"}, + {file = "clickhouse_connect-0.7.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:57143505ab5c17391bdaaa6962d2800beebc7d0ab0dc23de5a097978394e75ea"}, + {file = "clickhouse_connect-0.7.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:716f13d77b9f82bbe52e428519dc00309ca185add10fa56662a5f07dbccda595"}, + {file = "clickhouse_connect-0.7.7-cp310-cp310-win32.whl", hash = "sha256:eae28d7b14df34fd853b3b8c6811b002d38e7d04015c034fefc2332dda9807af"}, + {file = "clickhouse_connect-0.7.7-cp310-cp310-win_amd64.whl", hash = "sha256:fe418891f755d39d82036c5b311ddb37f54bf331141a463b69089334a3b676c8"}, + {file = "clickhouse_connect-0.7.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5be44154f86d642fddeffe4466ad7fc53370468102a83ba3805a337693347210"}, + {file = "clickhouse_connect-0.7.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f4a898d7dcb0e2c13a73d823569e3d5e8ef4f1a1b25ead0dc6be04ea277a2488"}, + {file = "clickhouse_connect-0.7.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9d25256130e2601c0c4e815e7a05b3732713c7389170d18df36e2c37ed20e11"}, + {file = "clickhouse_connect-0.7.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f123122c34d2fb8a68911c70872be7db749d406e18fcd165e7cdfea45f372c20"}, + {file = "clickhouse_connect-0.7.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49392e83e4691aec1c3050cb0e7534cab196e0da23065adcfbe7f0d77523c586"}, + {file = "clickhouse_connect-0.7.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7fe3731800957c374b7d8b3c1f959f766f7946d478e0f3f208815935b9231dec"}, + {file = "clickhouse_connect-0.7.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:2bc66286e5374e01f1df92d21293bdf40d5cabf664dabd6ea8f99ba495354c12"}, + {file = "clickhouse_connect-0.7.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a78794703bea0df09a5fe5965a69cc7f8044f72e8470efc123257dea77a06edf"}, + {file = 
"clickhouse_connect-0.7.7-cp311-cp311-win32.whl", hash = "sha256:5a5764a2ec7e1085136789e29ecc69dd19b799c071ec5eff63f7f13a82fcb1bf"}, + {file = "clickhouse_connect-0.7.7-cp311-cp311-win_amd64.whl", hash = "sha256:c43c693b8a360d948b0b8914b37b233d61e63286d921a753c7f8cfb96cff607f"}, + {file = "clickhouse_connect-0.7.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:355a14e500d7f99a2ab152886ae253104edf65ce32605d7670691d399527564f"}, + {file = "clickhouse_connect-0.7.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:865a9cb3aa823a8e5d4b73892b4cba810b514162fafe52f7c6a76abf79483dbd"}, + {file = "clickhouse_connect-0.7.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c175735b6c9ec884fe8d196cbda98bde53c7d376e8d7df1deac407d678f2250"}, + {file = "clickhouse_connect-0.7.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a7e4ab3e949bcae660cc4d825416f45a840db70ea529f4055f47add7cc6a380c"}, + {file = "clickhouse_connect-0.7.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fb84c3cf9009c0192cc025908a215a83e322d8964116c40239407470e591025"}, + {file = "clickhouse_connect-0.7.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:41caca3ee5db49311a55b885a60e51f94ff29e8f56083a04db3383c5604013e0"}, + {file = "clickhouse_connect-0.7.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:f99f89badd6ca8f731bc7bc9d5c30b43bcec9286446b57d640206573d15f8a37"}, + {file = "clickhouse_connect-0.7.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a6b1a072763e83d2670ac45f0e3ebd58f9aa2c430d12daf2cd7cdc7fc8049e35"}, + {file = "clickhouse_connect-0.7.7-cp312-cp312-win32.whl", hash = "sha256:95fa9482fab216806632e80c0f9ad3d28433fb73295afbb2fc56b8d437de70c5"}, + {file = "clickhouse_connect-0.7.7-cp312-cp312-win_amd64.whl", hash = "sha256:59b95cfb84795bf0c23a6d9eac43fbba6de0fb57ac8d12efdcf82e6408a2ff4c"}, + {file = "clickhouse_connect-0.7.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8616917cdacdd388c2678faf8989594a8db2fac35b324931aff18193b5e8e97e"}, + {file = "clickhouse_connect-0.7.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1bbc60823cfced80b2abc0042d515ad18f02900577b230c97203bac805120eb6"}, + {file = "clickhouse_connect-0.7.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70ac16feb0a0510b5ee0cfcc898bdaa7f149e7dc250126bf302828fdec24189e"}, + {file = "clickhouse_connect-0.7.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf214483628740bac0801abba8d124408bfc21aeec26a97a81996975b6795f4a"}, + {file = "clickhouse_connect-0.7.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:77f4160de7db18d8f3ba9ebce12c629be825e630a85380618bc2f4fe2fdcf565"}, + {file = "clickhouse_connect-0.7.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5f1eac642de2aeef60644944a9fae576fda4d6216a0b5880e50cc68459090bb0"}, + {file = "clickhouse_connect-0.7.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:12d31e3bd21fdc3b7c797a22ec94a41d584cd78ea925c6145ae83c74930a2675"}, + {file = "clickhouse_connect-0.7.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:79e5611a67d35cd6f444e9b715cb0bfbf216129462e1fe244dd27474c3e12ba5"}, + {file = "clickhouse_connect-0.7.7-cp38-cp38-win32.whl", hash = "sha256:00a53122c895ff9a364542a03c851c651331c986d601a6a3f0a8d64a63d3f33a"}, + {file = "clickhouse_connect-0.7.7-cp38-cp38-win_amd64.whl", hash = "sha256:4e826059be77957c695c0925f4f94f4111cc18e9cfa80798469e13d786d43034"}, 
+ {file = "clickhouse_connect-0.7.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2d0fc9fca68451eddf84c0261be951b45d29b3cb0fe2775abd97f2963e52a5cc"}, + {file = "clickhouse_connect-0.7.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d9fbc8c3460c9016357aa142bf1cef62e67c4e8e92344ecef50368507f551c4e"}, + {file = "clickhouse_connect-0.7.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1872550d0859608dfeef90fef80c0f21d19f390dd301562e9dfb4c58010a825c"}, + {file = "clickhouse_connect-0.7.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cceb3db2d3ce4bb94c21fc648af42aa3f29ea9c001f50cff862562d7a314f044"}, + {file = "clickhouse_connect-0.7.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:06ab8705a60c2ba75be4e160eba97a073da5f3da7b2a8fe75e2a3c0f6d943ee8"}, + {file = "clickhouse_connect-0.7.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8d1f99db0d091dcd9d488a3cde76973048d678e2bd7408d46b11a9dc0cac8963"}, + {file = "clickhouse_connect-0.7.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7d696c31145a9373d71388f035b79cd5182b6931aa4663b4f22c9c339607022b"}, + {file = "clickhouse_connect-0.7.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0fac18dc51bbd9ed7c54271b6b9f45bdf4aeded51903da05d6250a6e851437c3"}, + {file = "clickhouse_connect-0.7.7-cp39-cp39-win32.whl", hash = "sha256:583c0928431cd0da4ad234a70935631035b13f99a746cb0e6b75876c890adb9c"}, + {file = "clickhouse_connect-0.7.7-cp39-cp39-win_amd64.whl", hash = "sha256:9440118da473b22bfb4c1e28de8f3ee03fb8cc5b30d3bcd47e60a2e07007f907"}, + {file = "clickhouse_connect-0.7.7-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6d5afcb747d562db33a8d89f82eb0b14a98d020553793650b8c7b0098cdd4228"}, + {file = "clickhouse_connect-0.7.7-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42655edd00c72f29ad493ae2c40b149285d174eb8c7f53570566f575b8bc1834"}, + {file = "clickhouse_connect-0.7.7-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63c9a3827daa446d72f2085bebfd06f9b2922a17bc632bcb6874d9e015667f6d"}, + {file = "clickhouse_connect-0.7.7-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fdc483d70d6c465667e509c67ebe90df1547f3c4dc40141215a23231b0f508b1"}, + {file = "clickhouse_connect-0.7.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ca9e774178758503d45c07f0b7a3c6e9873d40265057a1dcfb698913ddef743a"}, + {file = "clickhouse_connect-0.7.7-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4ea52cef11a3fa52aa4800b31cff1161719046775243b2f211b5dbb0e7b82cf2"}, + {file = "clickhouse_connect-0.7.7-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63fd602b8a5261713cec048a31e2983f6302be5fb3476f57ae38c6c827857b3e"}, + {file = "clickhouse_connect-0.7.7-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47c0d9fdeda44de81f5c5c87e3b978f1d9f39a22b3f4239f341d5dcff42f0e73"}, + {file = "clickhouse_connect-0.7.7-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4dee8e39d03a3092663272d601e3274b6b350332ae5cecb3909fbac411a2287"}, + {file = "clickhouse_connect-0.7.7-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:d6fdc70fc0285556704aa3087cc443cefa6b679a72c1559a70cac3d31a2bd3bf"}, + {file = "clickhouse_connect-0.7.7-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:c6faf19cf11f29986bd3ea568a86859fa3d492429268d6e2dd632d6cf48fe62b"}, + {file = "clickhouse_connect-0.7.7-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17d15ce6d21f8f8cd8ae9a76df0c2bc713e1741b42a9851d13ac12e450e63667"}, + {file = "clickhouse_connect-0.7.7-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4b3b3dc9c25b4f5b93d79a338eb3092cde61cfb5e25b76b6456ffe01637b138"}, + {file = "clickhouse_connect-0.7.7-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dcc6840d8204ea621b7cfaee79d873dbea8314a47ba39e05894f0338c05641be"}, + {file = "clickhouse_connect-0.7.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:ea04b094cf3a2cb0335b6f46d895a5019d2eab0b2eb9a0abbbf939d473c36218"}, +] + +[package.dependencies] +certifi = "*" +lz4 = "*" +pytz = "*" +urllib3 = ">=1.26" +zstandard = "*" + +[package.extras] +arrow = ["pyarrow"] +numpy = ["numpy"] +orjson = ["orjson"] +pandas = ["pandas"] +sqlalchemy = ["sqlalchemy (>1.3.21,<2.0)"] + [[package]] name = "clickhouse-driver" version = "0.2.7" @@ -9080,12 +9169,73 @@ files = [ docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] +[[package]] +name = "zstandard" +version = "0.22.0" +description = "Zstandard bindings for Python" +optional = true +python-versions = ">=3.8" +files = [ + {file = "zstandard-0.22.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:275df437ab03f8c033b8a2c181e51716c32d831082d93ce48002a5227ec93019"}, + {file = "zstandard-0.22.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2ac9957bc6d2403c4772c890916bf181b2653640da98f32e04b96e4d6fb3252a"}, + {file = "zstandard-0.22.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe3390c538f12437b859d815040763abc728955a52ca6ff9c5d4ac707c4ad98e"}, + {file = "zstandard-0.22.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1958100b8a1cc3f27fa21071a55cb2ed32e9e5df4c3c6e661c193437f171cba2"}, + {file = "zstandard-0.22.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93e1856c8313bc688d5df069e106a4bc962eef3d13372020cc6e3ebf5e045202"}, + {file = "zstandard-0.22.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:1a90ba9a4c9c884bb876a14be2b1d216609385efb180393df40e5172e7ecf356"}, + {file = "zstandard-0.22.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3db41c5e49ef73641d5111554e1d1d3af106410a6c1fb52cf68912ba7a343a0d"}, + {file = "zstandard-0.22.0-cp310-cp310-win32.whl", hash = "sha256:d8593f8464fb64d58e8cb0b905b272d40184eac9a18d83cf8c10749c3eafcd7e"}, + {file = "zstandard-0.22.0-cp310-cp310-win_amd64.whl", hash = "sha256:f1a4b358947a65b94e2501ce3e078bbc929b039ede4679ddb0460829b12f7375"}, + {file = "zstandard-0.22.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:589402548251056878d2e7c8859286eb91bd841af117dbe4ab000e6450987e08"}, + {file = "zstandard-0.22.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a97079b955b00b732c6f280d5023e0eefe359045e8b83b08cf0333af9ec78f26"}, + {file = "zstandard-0.22.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:445b47bc32de69d990ad0f34da0e20f535914623d1e506e74d6bc5c9dc40bb09"}, + {file = "zstandard-0.22.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33591d59f4956c9812f8063eff2e2c0065bc02050837f152574069f5f9f17775"}, + {file = "zstandard-0.22.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:888196c9c8893a1e8ff5e89b8f894e7f4f0e64a5af4d8f3c410f0319128bb2f8"}, + {file = "zstandard-0.22.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:53866a9d8ab363271c9e80c7c2e9441814961d47f88c9bc3b248142c32141d94"}, + {file = "zstandard-0.22.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4ac59d5d6910b220141c1737b79d4a5aa9e57466e7469a012ed42ce2d3995e88"}, + {file = "zstandard-0.22.0-cp311-cp311-win32.whl", hash = "sha256:2b11ea433db22e720758cba584c9d661077121fcf60ab43351950ded20283440"}, + {file = "zstandard-0.22.0-cp311-cp311-win_amd64.whl", hash = "sha256:11f0d1aab9516a497137b41e3d3ed4bbf7b2ee2abc79e5c8b010ad286d7464bd"}, + {file = "zstandard-0.22.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6c25b8eb733d4e741246151d895dd0308137532737f337411160ff69ca24f93a"}, + {file = "zstandard-0.22.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f9b2cde1cd1b2a10246dbc143ba49d942d14fb3d2b4bccf4618d475c65464912"}, + {file = "zstandard-0.22.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a88b7df61a292603e7cd662d92565d915796b094ffb3d206579aaebac6b85d5f"}, + {file = "zstandard-0.22.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:466e6ad8caefb589ed281c076deb6f0cd330e8bc13c5035854ffb9c2014b118c"}, + {file = "zstandard-0.22.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a1d67d0d53d2a138f9e29d8acdabe11310c185e36f0a848efa104d4e40b808e4"}, + {file = "zstandard-0.22.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:39b2853efc9403927f9065cc48c9980649462acbdf81cd4f0cb773af2fd734bc"}, + {file = "zstandard-0.22.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8a1b2effa96a5f019e72874969394edd393e2fbd6414a8208fea363a22803b45"}, + {file = "zstandard-0.22.0-cp312-cp312-win32.whl", hash = "sha256:88c5b4b47a8a138338a07fc94e2ba3b1535f69247670abfe422de4e0b344aae2"}, + {file = "zstandard-0.22.0-cp312-cp312-win_amd64.whl", hash = "sha256:de20a212ef3d00d609d0b22eb7cc798d5a69035e81839f549b538eff4105d01c"}, + {file = "zstandard-0.22.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d75f693bb4e92c335e0645e8845e553cd09dc91616412d1d4650da835b5449df"}, + {file = "zstandard-0.22.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:36a47636c3de227cd765e25a21dc5dace00539b82ddd99ee36abae38178eff9e"}, + {file = "zstandard-0.22.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68953dc84b244b053c0d5f137a21ae8287ecf51b20872eccf8eaac0302d3e3b0"}, + {file = "zstandard-0.22.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2612e9bb4977381184bb2463150336d0f7e014d6bb5d4a370f9a372d21916f69"}, + {file = "zstandard-0.22.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:23d2b3c2b8e7e5a6cb7922f7c27d73a9a615f0a5ab5d0e03dd533c477de23004"}, + {file = "zstandard-0.22.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1d43501f5f31e22baf822720d82b5547f8a08f5386a883b32584a185675c8fbf"}, + {file = "zstandard-0.22.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:a493d470183ee620a3df1e6e55b3e4de8143c0ba1b16f3ded83208ea8ddfd91d"}, + {file = "zstandard-0.22.0-cp38-cp38-win32.whl", hash = "sha256:7034d381789f45576ec3f1fa0e15d741828146439228dc3f7c59856c5bcd3292"}, + {file = "zstandard-0.22.0-cp38-cp38-win_amd64.whl", hash = "sha256:d8fff0f0c1d8bc5d866762ae95bd99d53282337af1be9dc0d88506b340e74b73"}, + {file = "zstandard-0.22.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2fdd53b806786bd6112d97c1f1e7841e5e4daa06810ab4b284026a1a0e484c0b"}, + {file = "zstandard-0.22.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:73a1d6bd01961e9fd447162e137ed949c01bdb830dfca487c4a14e9742dccc93"}, + {file = "zstandard-0.22.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9501f36fac6b875c124243a379267d879262480bf85b1dbda61f5ad4d01b75a3"}, + {file = "zstandard-0.22.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48f260e4c7294ef275744210a4010f116048e0c95857befb7462e033f09442fe"}, + {file = "zstandard-0.22.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:959665072bd60f45c5b6b5d711f15bdefc9849dd5da9fb6c873e35f5d34d8cfb"}, + {file = "zstandard-0.22.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d22fdef58976457c65e2796e6730a3ea4a254f3ba83777ecfc8592ff8d77d303"}, + {file = "zstandard-0.22.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a7ccf5825fd71d4542c8ab28d4d482aace885f5ebe4b40faaa290eed8e095a4c"}, + {file = "zstandard-0.22.0-cp39-cp39-win32.whl", hash = "sha256:f058a77ef0ece4e210bb0450e68408d4223f728b109764676e1a13537d056bb0"}, + {file = "zstandard-0.22.0-cp39-cp39-win_amd64.whl", hash = "sha256:e9e9d4e2e336c529d4c435baad846a181e39a982f823f7e4495ec0b0ec8538d2"}, + {file = "zstandard-0.22.0.tar.gz", hash = "sha256:8226a33c542bcb54cd6bd0a366067b610b41713b64c9abec1bc4533d69f51e70"}, +] + +[package.dependencies] +cffi = {version = ">=1.11", markers = "platform_python_implementation == \"PyPy\""} + +[package.extras] +cffi = ["cffi (>=1.11)"] + [extras] athena = ["botocore", "pyarrow", "pyathena", "s3fs"] az = ["adlfs"] bigquery = ["gcsfs", "google-cloud-bigquery", "grpcio", "pyarrow"] cli = ["cron-descriptor", "pipdeptree"] -clickhouse = ["adlfs", "clickhouse-driver", "gcsfs", "pyarrow", "s3fs"] +clickhouse = ["adlfs", "clickhouse-connect", "clickhouse-driver", "gcsfs", "pyarrow", "s3fs"] databricks = ["databricks-sql-connector"] dbt = ["dbt-athena-community", "dbt-bigquery", "dbt-core", "dbt-databricks", "dbt-duckdb", "dbt-redshift", "dbt-snowflake"] duckdb = ["duckdb", "duckdb"] @@ -9106,4 +9256,4 @@ weaviate = ["weaviate-client"] [metadata] lock-version = "2.0" python-versions = ">=3.8.1,<3.13" -content-hash = "b3976f6c6626123bd0c102534cf01ad7f72682699c3d0380c480531b00c20663" +content-hash = "71329bd03e3d09294aaacc663896e47d9c22bf43ee1434bddb84beb4d7fc0269" diff --git a/pyproject.toml b/pyproject.toml index fd497513ad..423dc25cd4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -81,6 +81,7 @@ qdrant-client = {version = "^1.6.4", optional = true, extras = ["fastembed"]} databricks-sql-connector = {version = ">=2.9.3,<3.0.0", optional = true} dbt-databricks = {version = "^1.7.3", optional = true} clickhouse-driver = { version = "^0.2.7", optional = true } +clickhouse-connect = { version = "^0.7.7", optional = true } [tool.poetry.extras] dbt = ["dbt-core", "dbt-redshift", "dbt-bigquery", "dbt-duckdb", "dbt-snowflake", "dbt-athena-community", "dbt-databricks"] @@ -104,7 +105,7 @@ mssql = ["pyodbc"] synapse = ["pyodbc", "adlfs", 
"pyarrow"] qdrant = ["qdrant-client"] databricks = ["databricks-sql-connector"] -clickhouse = ["clickhouse-driver", "s3fs", "gcsfs", "pyarrow", "adlfs"] +clickhouse = ["clickhouse-driver", "clickhouse-connect", "s3fs", "gcsfs", "adlfs", "pyarrow"] [tool.poetry.scripts] dlt = "dlt.cli._dlt:_main" diff --git a/tests/load/utils.py b/tests/load/utils.py index ba84327687..93055cbd2b 100644 --- a/tests/load/utils.py +++ b/tests/load/utils.py @@ -200,6 +200,8 @@ def destinations_configs( destination_configs += [ DestinationTestConfiguration( destination="clickhouse", + file_format="jsonl", + disable_compression=True, ) ] destination_configs += [ From 8215f446ea876c6fb43fe8c2a4ef40bfa840f640 Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Fri, 5 Apr 2024 21:08:30 +0200 Subject: [PATCH 059/127] Remove DATASET_PREFIX from sql_client.py Signed-off-by: Marcel Coetzee --- dlt/destinations/impl/clickhouse/sql_client.py | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/dlt/destinations/impl/clickhouse/sql_client.py b/dlt/destinations/impl/clickhouse/sql_client.py index e52c0e6207..b8ba9cf6a9 100644 --- a/dlt/destinations/impl/clickhouse/sql_client.py +++ b/dlt/destinations/impl/clickhouse/sql_client.py @@ -35,10 +35,6 @@ TRANSACTIONS_UNSUPPORTED_WARNING_MESSAGE = ( "Clickhouse does not support transactions! Each statement is auto-committed separately." ) -DATASET_PREFIX = dlt.config["destination.clickhouse.credentials.dataset_prefix"] or "__" -DATASET_TABLE_SEPARATOR = ( - dlt.config["destination.clickhouse.credentials.dataset_table_separator"] or "___" -) class ClickhouseDBApiCursorImpl(DBApiCursorImpl): @@ -154,23 +150,26 @@ def fully_qualified_dataset_name(self, escape: bool = True) -> str: if escape: database_name = self.capabilities.escape_identifier(self.database_name) dataset_name = self.capabilities.escape_identifier( - f"{DATASET_PREFIX}{self.dataset_name}" + f"{self.dataset_name}" ) else: database_name = self.database_name - dataset_name = f"{DATASET_PREFIX}{self.dataset_name}" + dataset_name = f"{self.dataset_name}" return f"{database_name}.{dataset_name}" def make_qualified_table_name(self, table_name: str, escape: bool = True) -> str: + dataset_table_separator = dlt.config[ + "destination.clickhouse.credentials.dataset_table_separator" + ] if escape: database_name = self.capabilities.escape_identifier(self.database_name) dataset_and_table = self.capabilities.escape_identifier( - f"{DATASET_PREFIX}{self.dataset_name}{DATASET_TABLE_SEPARATOR}{table_name}" + f"{self.dataset_name}{dataset_table_separator}{table_name}" ) else: database_name = self.database_name dataset_and_table = ( - f"{DATASET_PREFIX}{self.dataset_name}{DATASET_TABLE_SEPARATOR}{table_name}" + f"{self.dataset_name}{dataset_table_separator}{table_name}" ) return f"{database_name}.{dataset_and_table}" From 881a0b9693c38539ff56ebe3d8512448e8e10ead Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Fri, 5 Apr 2024 22:56:31 +0200 Subject: [PATCH 060/127] Add clickhouse connect as local fallback #1055 Signed-off-by: Marcel Coetzee --- .../impl/clickhouse/clickhouse.py | 40 +++++++++++-------- .../impl/clickhouse/configuration.py | 9 ++++- .../impl/clickhouse/sql_client.py | 8 +--- 3 files changed, 32 insertions(+), 25 deletions(-) diff --git a/dlt/destinations/impl/clickhouse/clickhouse.py b/dlt/destinations/impl/clickhouse/clickhouse.py index 592200378f..7369a62149 100644 --- a/dlt/destinations/impl/clickhouse/clickhouse.py +++ b/dlt/destinations/impl/clickhouse/clickhouse.py @@ -244,30 +244,36 @@ 
def __init__( statement = f"INSERT INTO {qualified_table_name} {table_function}" elif not bucket_path: # Local filesystem. - with clickhouse_connect.get_client( - host=client.credentials.host, - port=client.credentials.port, - database=client.credentials.database, - user_name=client.credentials.username, - password=client.credentials.password, - secure=bool(client.credentials.secure), - ) as clickhouse_connect_client: - insert_file( - clickhouse_connect_client, - qualified_table_name, + try: + with clickhouse_connect.create_client( + host=client.credentials.host, + port=client.credentials.http_port, + database=client.credentials.database, + user_name=client.credentials.username, + password=client.credentials.password, + secure=bool(client.credentials.secure), + ) as clickhouse_connect_client: + insert_file( + clickhouse_connect_client, + qualified_table_name, + file_path, + fmt=clickhouse_format, + ) + except clickhouse_connect.driver.exceptions.Error as e: + raise LoadJobTerminalException( file_path, - fmt=clickhouse_format, - database=client.database_name, - ) - statement = "" + f"Clickhouse connection failed due to {e}.", + ) from e else: raise LoadJobTerminalException( file_path, f"Clickhouse loader does not support '{bucket_scheme}' filesystem.", ) - with client.begin_transaction(): - client.execute_sql(statement) + # Don't use dbapi driver for local files. + if bucket_path: + with client.begin_transaction(): + client.execute_sql(statement) def state(self) -> TLoadJobState: return "completed" diff --git a/dlt/destinations/impl/clickhouse/configuration.py b/dlt/destinations/impl/clickhouse/configuration.py index 534ea7acfd..f35d6f944f 100644 --- a/dlt/destinations/impl/clickhouse/configuration.py +++ b/dlt/destinations/impl/clickhouse/configuration.py @@ -19,14 +19,18 @@ class ClickhouseCredentials(ConnectionStringCredentials): host: str # type: ignore """Host with running ClickHouse server.""" port: int = 9440 - """Port ClickHouse server is bound to. Defaults to 9000.""" + """Native port ClickHouse server is bound to. Defaults to 9440.""" + http_port: int = 8443 + """HTTP Port to connect to ClickHouse server's HTTP interface. + The HTTP port is needed for non-staging pipelines. + Defaults to 8123.""" username: str = "default" """Database user. Defaults to 'default'.""" database: str = "default" """database connect to. Defaults to 'default'.""" secure: TSecureConnection = 1 """Enables TLS encryption when connecting to ClickHouse Server. 0 means no encryption, 1 means encrypted.""" - connect_timeout: int = 10 + connect_timeout: int = 15 """Timeout for establishing connection. Defaults to 10 seconds.""" send_receive_timeout: int = 300 """Timeout for sending and receiving data. 
Defaults to 300 seconds.""" @@ -34,6 +38,7 @@ class ClickhouseCredentials(ConnectionStringCredentials): __config_gen_annotations__: ClassVar[List[str]] = [ "host", "port", + "http_port", "username", "database", "secure", diff --git a/dlt/destinations/impl/clickhouse/sql_client.py b/dlt/destinations/impl/clickhouse/sql_client.py index b8ba9cf6a9..0857f15f52 100644 --- a/dlt/destinations/impl/clickhouse/sql_client.py +++ b/dlt/destinations/impl/clickhouse/sql_client.py @@ -149,9 +149,7 @@ def execute_query( def fully_qualified_dataset_name(self, escape: bool = True) -> str: if escape: database_name = self.capabilities.escape_identifier(self.database_name) - dataset_name = self.capabilities.escape_identifier( - f"{self.dataset_name}" - ) + dataset_name = self.capabilities.escape_identifier(f"{self.dataset_name}") else: database_name = self.database_name dataset_name = f"{self.dataset_name}" @@ -168,9 +166,7 @@ def make_qualified_table_name(self, table_name: str, escape: bool = True) -> str ) else: database_name = self.database_name - dataset_and_table = ( - f"{self.dataset_name}{dataset_table_separator}{table_name}" - ) + dataset_and_table = f"{self.dataset_name}{dataset_table_separator}{table_name}" return f"{database_name}.{dataset_and_table}" @classmethod From a4904a8cf8d053eb08ac0dfa86bcceaef3df8fde Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Fri, 5 Apr 2024 23:19:33 +0200 Subject: [PATCH 061/127] Set settings on local #1055 Signed-off-by: Marcel Coetzee --- dlt/destinations/impl/clickhouse/clickhouse.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/dlt/destinations/impl/clickhouse/clickhouse.py b/dlt/destinations/impl/clickhouse/clickhouse.py index 7369a62149..362e26a4d1 100644 --- a/dlt/destinations/impl/clickhouse/clickhouse.py +++ b/dlt/destinations/impl/clickhouse/clickhouse.py @@ -258,6 +258,10 @@ def __init__( qualified_table_name, file_path, fmt=clickhouse_format, + settings={ + "allow_experimental_lightweight_delete": 1, + "allow_experimental_object_type": 1, + }, ) except clickhouse_connect.driver.exceptions.Error as e: raise LoadJobTerminalException( From b44bea3d5a7092680975a5fe97d56e641f388b60 Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Fri, 5 Apr 2024 23:22:49 +0200 Subject: [PATCH 062/127] Update lock Signed-off-by: Marcel Coetzee --- poetry.lock | 346 ++++++++++++++++++++++++++++++++++++++++++++-------- 1 file changed, 293 insertions(+), 53 deletions(-) diff --git a/poetry.lock b/poetry.lock index 458614b792..ee1431011e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. 
[[package]] name = "about-time" @@ -1670,6 +1670,217 @@ files = [ click = ">=4.0" PyYAML = ">=3.11" +[[package]] +name = "clickhouse-connect" +version = "0.7.7" +description = "ClickHouse Database Core Driver for Python, Pandas, and Superset" +optional = true +python-versions = "~=3.8" +files = [ + {file = "clickhouse-connect-0.7.7.tar.gz", hash = "sha256:c9ff4377ceefaa47c382a7372da4fdbf807f931000ad9b5bab850a60166b0d1c"}, + {file = "clickhouse_connect-0.7.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:233037de24cf1d9ed0bed0a507b970845e7f181da708198a0a033b05ae408969"}, + {file = "clickhouse_connect-0.7.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6ea4deac8d35323d328155497ba51d80b54a20c2dd2d93b5fd6a1d8f92dca630"}, + {file = "clickhouse_connect-0.7.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e24036258ed72e3440be3b7302bbd110157bcdddc1a92e0f6829bba5eb10b7a"}, + {file = "clickhouse_connect-0.7.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80826f10017c07f00cac111fcdf0ac8ec4075d6ae718047cac6440ce3df9816e"}, + {file = "clickhouse_connect-0.7.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5d72fec2df3996edfd0a7b2d9c51a99db8771f122537bd1ef963d3d7ccb315c"}, + {file = "clickhouse_connect-0.7.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:542c9fa4c92de8bcd77b8503b7ef778f32f307823eba37a3fb955e0b82fc8e7e"}, + {file = "clickhouse_connect-0.7.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:57143505ab5c17391bdaaa6962d2800beebc7d0ab0dc23de5a097978394e75ea"}, + {file = "clickhouse_connect-0.7.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:716f13d77b9f82bbe52e428519dc00309ca185add10fa56662a5f07dbccda595"}, + {file = "clickhouse_connect-0.7.7-cp310-cp310-win32.whl", hash = "sha256:eae28d7b14df34fd853b3b8c6811b002d38e7d04015c034fefc2332dda9807af"}, + {file = "clickhouse_connect-0.7.7-cp310-cp310-win_amd64.whl", hash = "sha256:fe418891f755d39d82036c5b311ddb37f54bf331141a463b69089334a3b676c8"}, + {file = "clickhouse_connect-0.7.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5be44154f86d642fddeffe4466ad7fc53370468102a83ba3805a337693347210"}, + {file = "clickhouse_connect-0.7.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f4a898d7dcb0e2c13a73d823569e3d5e8ef4f1a1b25ead0dc6be04ea277a2488"}, + {file = "clickhouse_connect-0.7.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9d25256130e2601c0c4e815e7a05b3732713c7389170d18df36e2c37ed20e11"}, + {file = "clickhouse_connect-0.7.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f123122c34d2fb8a68911c70872be7db749d406e18fcd165e7cdfea45f372c20"}, + {file = "clickhouse_connect-0.7.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49392e83e4691aec1c3050cb0e7534cab196e0da23065adcfbe7f0d77523c586"}, + {file = "clickhouse_connect-0.7.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7fe3731800957c374b7d8b3c1f959f766f7946d478e0f3f208815935b9231dec"}, + {file = "clickhouse_connect-0.7.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:2bc66286e5374e01f1df92d21293bdf40d5cabf664dabd6ea8f99ba495354c12"}, + {file = "clickhouse_connect-0.7.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a78794703bea0df09a5fe5965a69cc7f8044f72e8470efc123257dea77a06edf"}, + {file = "clickhouse_connect-0.7.7-cp311-cp311-win32.whl", hash = 
"sha256:5a5764a2ec7e1085136789e29ecc69dd19b799c071ec5eff63f7f13a82fcb1bf"}, + {file = "clickhouse_connect-0.7.7-cp311-cp311-win_amd64.whl", hash = "sha256:c43c693b8a360d948b0b8914b37b233d61e63286d921a753c7f8cfb96cff607f"}, + {file = "clickhouse_connect-0.7.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:355a14e500d7f99a2ab152886ae253104edf65ce32605d7670691d399527564f"}, + {file = "clickhouse_connect-0.7.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:865a9cb3aa823a8e5d4b73892b4cba810b514162fafe52f7c6a76abf79483dbd"}, + {file = "clickhouse_connect-0.7.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c175735b6c9ec884fe8d196cbda98bde53c7d376e8d7df1deac407d678f2250"}, + {file = "clickhouse_connect-0.7.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a7e4ab3e949bcae660cc4d825416f45a840db70ea529f4055f47add7cc6a380c"}, + {file = "clickhouse_connect-0.7.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fb84c3cf9009c0192cc025908a215a83e322d8964116c40239407470e591025"}, + {file = "clickhouse_connect-0.7.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:41caca3ee5db49311a55b885a60e51f94ff29e8f56083a04db3383c5604013e0"}, + {file = "clickhouse_connect-0.7.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:f99f89badd6ca8f731bc7bc9d5c30b43bcec9286446b57d640206573d15f8a37"}, + {file = "clickhouse_connect-0.7.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a6b1a072763e83d2670ac45f0e3ebd58f9aa2c430d12daf2cd7cdc7fc8049e35"}, + {file = "clickhouse_connect-0.7.7-cp312-cp312-win32.whl", hash = "sha256:95fa9482fab216806632e80c0f9ad3d28433fb73295afbb2fc56b8d437de70c5"}, + {file = "clickhouse_connect-0.7.7-cp312-cp312-win_amd64.whl", hash = "sha256:59b95cfb84795bf0c23a6d9eac43fbba6de0fb57ac8d12efdcf82e6408a2ff4c"}, + {file = "clickhouse_connect-0.7.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8616917cdacdd388c2678faf8989594a8db2fac35b324931aff18193b5e8e97e"}, + {file = "clickhouse_connect-0.7.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1bbc60823cfced80b2abc0042d515ad18f02900577b230c97203bac805120eb6"}, + {file = "clickhouse_connect-0.7.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70ac16feb0a0510b5ee0cfcc898bdaa7f149e7dc250126bf302828fdec24189e"}, + {file = "clickhouse_connect-0.7.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf214483628740bac0801abba8d124408bfc21aeec26a97a81996975b6795f4a"}, + {file = "clickhouse_connect-0.7.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:77f4160de7db18d8f3ba9ebce12c629be825e630a85380618bc2f4fe2fdcf565"}, + {file = "clickhouse_connect-0.7.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5f1eac642de2aeef60644944a9fae576fda4d6216a0b5880e50cc68459090bb0"}, + {file = "clickhouse_connect-0.7.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:12d31e3bd21fdc3b7c797a22ec94a41d584cd78ea925c6145ae83c74930a2675"}, + {file = "clickhouse_connect-0.7.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:79e5611a67d35cd6f444e9b715cb0bfbf216129462e1fe244dd27474c3e12ba5"}, + {file = "clickhouse_connect-0.7.7-cp38-cp38-win32.whl", hash = "sha256:00a53122c895ff9a364542a03c851c651331c986d601a6a3f0a8d64a63d3f33a"}, + {file = "clickhouse_connect-0.7.7-cp38-cp38-win_amd64.whl", hash = "sha256:4e826059be77957c695c0925f4f94f4111cc18e9cfa80798469e13d786d43034"}, + {file = 
"clickhouse_connect-0.7.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2d0fc9fca68451eddf84c0261be951b45d29b3cb0fe2775abd97f2963e52a5cc"}, + {file = "clickhouse_connect-0.7.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d9fbc8c3460c9016357aa142bf1cef62e67c4e8e92344ecef50368507f551c4e"}, + {file = "clickhouse_connect-0.7.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1872550d0859608dfeef90fef80c0f21d19f390dd301562e9dfb4c58010a825c"}, + {file = "clickhouse_connect-0.7.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cceb3db2d3ce4bb94c21fc648af42aa3f29ea9c001f50cff862562d7a314f044"}, + {file = "clickhouse_connect-0.7.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:06ab8705a60c2ba75be4e160eba97a073da5f3da7b2a8fe75e2a3c0f6d943ee8"}, + {file = "clickhouse_connect-0.7.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8d1f99db0d091dcd9d488a3cde76973048d678e2bd7408d46b11a9dc0cac8963"}, + {file = "clickhouse_connect-0.7.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7d696c31145a9373d71388f035b79cd5182b6931aa4663b4f22c9c339607022b"}, + {file = "clickhouse_connect-0.7.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0fac18dc51bbd9ed7c54271b6b9f45bdf4aeded51903da05d6250a6e851437c3"}, + {file = "clickhouse_connect-0.7.7-cp39-cp39-win32.whl", hash = "sha256:583c0928431cd0da4ad234a70935631035b13f99a746cb0e6b75876c890adb9c"}, + {file = "clickhouse_connect-0.7.7-cp39-cp39-win_amd64.whl", hash = "sha256:9440118da473b22bfb4c1e28de8f3ee03fb8cc5b30d3bcd47e60a2e07007f907"}, + {file = "clickhouse_connect-0.7.7-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6d5afcb747d562db33a8d89f82eb0b14a98d020553793650b8c7b0098cdd4228"}, + {file = "clickhouse_connect-0.7.7-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42655edd00c72f29ad493ae2c40b149285d174eb8c7f53570566f575b8bc1834"}, + {file = "clickhouse_connect-0.7.7-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63c9a3827daa446d72f2085bebfd06f9b2922a17bc632bcb6874d9e015667f6d"}, + {file = "clickhouse_connect-0.7.7-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fdc483d70d6c465667e509c67ebe90df1547f3c4dc40141215a23231b0f508b1"}, + {file = "clickhouse_connect-0.7.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ca9e774178758503d45c07f0b7a3c6e9873d40265057a1dcfb698913ddef743a"}, + {file = "clickhouse_connect-0.7.7-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4ea52cef11a3fa52aa4800b31cff1161719046775243b2f211b5dbb0e7b82cf2"}, + {file = "clickhouse_connect-0.7.7-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63fd602b8a5261713cec048a31e2983f6302be5fb3476f57ae38c6c827857b3e"}, + {file = "clickhouse_connect-0.7.7-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47c0d9fdeda44de81f5c5c87e3b978f1d9f39a22b3f4239f341d5dcff42f0e73"}, + {file = "clickhouse_connect-0.7.7-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4dee8e39d03a3092663272d601e3274b6b350332ae5cecb3909fbac411a2287"}, + {file = "clickhouse_connect-0.7.7-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:d6fdc70fc0285556704aa3087cc443cefa6b679a72c1559a70cac3d31a2bd3bf"}, + {file = "clickhouse_connect-0.7.7-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:c6faf19cf11f29986bd3ea568a86859fa3d492429268d6e2dd632d6cf48fe62b"}, + {file = "clickhouse_connect-0.7.7-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17d15ce6d21f8f8cd8ae9a76df0c2bc713e1741b42a9851d13ac12e450e63667"}, + {file = "clickhouse_connect-0.7.7-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4b3b3dc9c25b4f5b93d79a338eb3092cde61cfb5e25b76b6456ffe01637b138"}, + {file = "clickhouse_connect-0.7.7-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dcc6840d8204ea621b7cfaee79d873dbea8314a47ba39e05894f0338c05641be"}, + {file = "clickhouse_connect-0.7.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:ea04b094cf3a2cb0335b6f46d895a5019d2eab0b2eb9a0abbbf939d473c36218"}, +] + +[package.dependencies] +certifi = "*" +lz4 = "*" +pytz = "*" +urllib3 = ">=1.26" +zstandard = "*" + +[package.extras] +arrow = ["pyarrow"] +numpy = ["numpy"] +orjson = ["orjson"] +pandas = ["pandas"] +sqlalchemy = ["sqlalchemy (>1.3.21,<2.0)"] + +[[package]] +name = "clickhouse-driver" +version = "0.2.7" +description = "Python driver with native interface for ClickHouse" +optional = true +python-versions = ">=3.7, <4" +files = [ + {file = "clickhouse-driver-0.2.7.tar.gz", hash = "sha256:299cfbe6d561955d88eeab6e09f3de31e2f6daccc6fdd904a59e46357d2d28d9"}, + {file = "clickhouse_driver-0.2.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c44fefc2fd44f432d5b162bfe34ad76840137c34167d46a18c554a7c7c6e3566"}, + {file = "clickhouse_driver-0.2.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e018452a7bf8d8c0adf958afbc5b0d29e402fc09a1fb34e9186293eae57f3b4e"}, + {file = "clickhouse_driver-0.2.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ff8b09f8b13df28d2f91ee3d0d2edd9589cbda76b74acf60669112219cea8c9d"}, + {file = "clickhouse_driver-0.2.7-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:54aa91c9512fd5a73f038cae4f67ca2ff0b2f8a84de846179a31530936ef4e20"}, + {file = "clickhouse_driver-0.2.7-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8342a7ba31ccb393ee31dfd61173aa84c995b4ac0b44d404adc8463534233d5"}, + {file = "clickhouse_driver-0.2.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:199000f8adf38fade0b5a52c273a396168105539de741a18ba3e68d7fc06e0e6"}, + {file = "clickhouse_driver-0.2.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f60a2a40602b207506e505cfb184a81cd4b752bde17153bc0b32c3931ddb792f"}, + {file = "clickhouse_driver-0.2.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5db3a26b18146b2b0b06d3f32ce588af5afaa38c719daf6f9606981514228a8b"}, + {file = "clickhouse_driver-0.2.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5579a31da1f3cf49630e43fbbb11cab891b78161abdcb33908b79820b7cd3a23"}, + {file = "clickhouse_driver-0.2.7-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:cc39f0fb761aed96917b0f55679174a50f9591afc0e696e745cd698ef822661f"}, + {file = "clickhouse_driver-0.2.7-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:9aa0f7c740e4e61886c6d388792c5d1a2084d4b5462e6dcfc24e30ca7e7f8e68"}, + {file = "clickhouse_driver-0.2.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2caee88b6eec7b33ddbccd24501ad99ff8ff2b0a6a4471945cbfb28947a9a791"}, + {file = "clickhouse_driver-0.2.7-cp310-cp310-win32.whl", hash = "sha256:a4aef432cc7120a971eebb7ca2fddac4472e810b57e403d3a371b0c69cbb2bb0"}, + {file = 
"clickhouse_driver-0.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:f307de7df6bc23ad5ec8a1ba1db157f4d14de673ddd4798f37790f23255605b0"}, + {file = "clickhouse_driver-0.2.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cbf3ca8919bf856ca6588669a863065fb732a32a6387095f64d19038fd99db9f"}, + {file = "clickhouse_driver-0.2.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ab68b3d9b9d1386adfd3a57edd47b62858a145bf7ccc7f11b31d308195d966e5"}, + {file = "clickhouse_driver-0.2.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:985a9d60044c5ad39c6e018b852c7105ec4ebfdf4c3abe23183b4867454e570a"}, + {file = "clickhouse_driver-0.2.7-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6c94330054c8d92d2286898906f843f26e2f96fc2aa11a9a96a7b5593d299bf0"}, + {file = "clickhouse_driver-0.2.7-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:92938f55c8f797e50e624a4b96e685178d043cdf0ede306a7fd4e7dda19b8dfd"}, + {file = "clickhouse_driver-0.2.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05bd53e9bf49c3013d06f9e6d2812872d44b150f7a2d1cf18e1498257d42330e"}, + {file = "clickhouse_driver-0.2.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f1f8ed5404e283a9ded499c33eade2423fdc15e31f8a711d75e91f890d0f70b"}, + {file = "clickhouse_driver-0.2.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a398085e4a1766d907ac32c282d4172db38a44243bde303372396208d1cbf4bb"}, + {file = "clickhouse_driver-0.2.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:fa1808593123b6056f93808f0afbc7938f06a8149cb4e381aa7b1a234c1d3c18"}, + {file = "clickhouse_driver-0.2.7-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:0512d54ae23bd4a69278e04f42b651d7c71b63ba6043e2c6bd97b11329692f99"}, + {file = "clickhouse_driver-0.2.7-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5bc2b67e7e68f74ccebf95a8b3a13f13a7c34b89b32c9813103221de14c06c8b"}, + {file = "clickhouse_driver-0.2.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:04a37cdafc671cb796af3e566cef0aeb39111d82aebeecd9106a049434953b26"}, + {file = "clickhouse_driver-0.2.7-cp311-cp311-win32.whl", hash = "sha256:019538c7c23e976538e5081dd2f77a8a40bf663c638a62d857ff05f42b0c9052"}, + {file = "clickhouse_driver-0.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:5166643683584bc53fcadda73c65f6a9077feb472f3d167ecef1a1a7024973aa"}, + {file = "clickhouse_driver-0.2.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:59affab7b5a3c4aab5b6a730f606575efdefea213458de2eb14927ee4e0640f4"}, + {file = "clickhouse_driver-0.2.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:dcb93dd07fe65ac4f1a2bc0b8967911d4ad2152dbee000f025ea5cb575da5ecb"}, + {file = "clickhouse_driver-0.2.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55a48019b79181ae1ca90e980e74c5d413c3f8829f6744e2b056646c2d435a1a"}, + {file = "clickhouse_driver-0.2.7-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:507463c9157240fd7c3246781e8c30df8db3c80bf68925b36ff3ad4a80c4b924"}, + {file = "clickhouse_driver-0.2.7-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1e2d8d2295ee9e0cfab8ad77cb635a05da2160334b4f16ed8c3d00fbf39a2343"}, + {file = "clickhouse_driver-0.2.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e38c44546dcdb956b5ab0944cb3d51e8c98f816e75bab1a2254c478865bc6e7b"}, + {file = 
"clickhouse_driver-0.2.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e6690a2bdd9e7531fe50b53193279f8b35cbcd5c5ee36c0fcc112518a7d24f16"}, + {file = "clickhouse_driver-0.2.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bc6b4ba0a6467fd09021aa1d87a44fb4589600d61b010fca41e0dfffd0dee322"}, + {file = "clickhouse_driver-0.2.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:254bbd400eb87ff547a08755bc714f712e11f7a6d3ebbbb7aaa1dd454fb16d44"}, + {file = "clickhouse_driver-0.2.7-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7bbbe3f8b87fc1489bc15fa9c88cc9fac9d4d7d683d076f058c2c83e6ee422fd"}, + {file = "clickhouse_driver-0.2.7-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:745e5b18f0957d932151527f1523d0e516c199de8c589638e5f55ab2559886f3"}, + {file = "clickhouse_driver-0.2.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:0fa0357fb5f26149e3df86a117d3678329b85d8827b78a5a09bbf224d8dd4541"}, + {file = "clickhouse_driver-0.2.7-cp312-cp312-win32.whl", hash = "sha256:ace652af7ca94ba3cb3a04a5c363e135dc5009f31d8201903e21db9d5daf2358"}, + {file = "clickhouse_driver-0.2.7-cp312-cp312-win_amd64.whl", hash = "sha256:c0ba68489544df89e4138a14b0ec3e1e5eb102d5d3283a91d9b837c420c0ab97"}, + {file = "clickhouse_driver-0.2.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:66267e4ba21fa66c97ce784a5de2202d3b7d4db3e50bfcdde92830a68f6fae30"}, + {file = "clickhouse_driver-0.2.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6cf55c285b75c178487407721baef4980b3c6515c9c0c1a6c1ea8b001afe658e"}, + {file = "clickhouse_driver-0.2.7-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:deeb66bb56490db2157f199c6d9aa2c53f046677be430cc834fc1e74eec6e654"}, + {file = "clickhouse_driver-0.2.7-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dfe5b4020939abeeb407b4eead598c954b1573d2d2b4f174f793b196d378b9d9"}, + {file = "clickhouse_driver-0.2.7-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84d39506b5f8d86a1195ebde1c66aba168f34ebce6ebd828888f0625cac54774"}, + {file = "clickhouse_driver-0.2.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f93a27db2dcbbd3ecad36e8df4395d047cb7410e2dc69f6d037674e15442f4ee"}, + {file = "clickhouse_driver-0.2.7-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ebc29e501e47ecbfd44c89c0e5c87b2a722049d38b9e93fdd4bea510a82e16ac"}, + {file = "clickhouse_driver-0.2.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f9cc8c186fea09a94d89e5c9c4e8d05ec3a80e2f6d25673c48efec8117a13cfc"}, + {file = "clickhouse_driver-0.2.7-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:0757dfde5410c42230b24825ea3ab904a78160520e5ceb953482e133e368733b"}, + {file = "clickhouse_driver-0.2.7-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c9f88818cf411f928c29ba295c677cd95773bd256b8490f5655fb489e0c6658c"}, + {file = "clickhouse_driver-0.2.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e19952f158ebe274c65ffeb294ba378d75048a48f31b77573948d606bed019d5"}, + {file = "clickhouse_driver-0.2.7-cp37-cp37m-win32.whl", hash = "sha256:008b1f32c7c68564de8051482b72a5289b6933bca9d9b1ad1474dd448d6768ba"}, + {file = "clickhouse_driver-0.2.7-cp37-cp37m-win_amd64.whl", hash = "sha256:622933cc9834c39f03de5d43a12f13fc7133d31d6d2597e67866d4a549ca9e60"}, + {file = "clickhouse_driver-0.2.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:92540581e5b5f36d915f14d05c30244870fb123c74b38c645fa47663053c5471"}, + {file = "clickhouse_driver-0.2.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:02dfadc6111b64e01c20b8c11266cab97d4f06685a392a183af437f2f1afb990"}, + {file = "clickhouse_driver-0.2.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3ca17fece86fe85d97705024bec881978271931b3d00db273c9d63244f7d606"}, + {file = "clickhouse_driver-0.2.7-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:76474f1315ca3ab484ae28ad085b8f756c8b9a755882f93912b2149290482033"}, + {file = "clickhouse_driver-0.2.7-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f5c0ff12368b34aaf58dd948b0819e5b54d261911de334d3f048328dc9354013"}, + {file = "clickhouse_driver-0.2.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cd441b17294e90e313b08fabf84fcc782c191d2b9b2a924f163928202db6fcc"}, + {file = "clickhouse_driver-0.2.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62aa158f61d7d84c58e8cd75b3b8340b28607e5a70132395078f578d518aaae3"}, + {file = "clickhouse_driver-0.2.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:bcb2a39a1fef8bf1b581f06125c2a84a5b92c939b079d1a95126e3207b05dc77"}, + {file = "clickhouse_driver-0.2.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1f29cc641a65e89a51a15f6d195f565ad2761d1bd653408c6b4046c987c5fb99"}, + {file = "clickhouse_driver-0.2.7-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ac1a43690696bda46c9a23fc6fd79b6fe22d428a18e880bdbdf5e6aeb31008c5"}, + {file = "clickhouse_driver-0.2.7-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:1dd5ea4584c42f85d96ddfa7d07da2abb35a797c45e4d3a66ace149ee4977cad"}, + {file = "clickhouse_driver-0.2.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a736c0af858a3c83af03848b18754ab18dc594cc7f3bf6be0b1fac682def182c"}, + {file = "clickhouse_driver-0.2.7-cp38-cp38-win32.whl", hash = "sha256:6cb8ca47f5818c1bc5814b9ff775e383f3c50059b1fd28a02cb9be1b666929f8"}, + {file = "clickhouse_driver-0.2.7-cp38-cp38-win_amd64.whl", hash = "sha256:a90e7dc92985669a5e6569356bb3028d9d475f95006d4487cb0789aa53f9489c"}, + {file = "clickhouse_driver-0.2.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:04b77cd6c583da9135db4a62c5a7999ae248c2dbfc0cb8e8a3d8a853b1fbfa11"}, + {file = "clickhouse_driver-0.2.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c7671f8c0e8960d766b2e0eaefcae3088fccdd3920e9cd3dee8e344cfd0a6929"}, + {file = "clickhouse_driver-0.2.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:502d7cd28522b95a399e993ffd48487e8c12c50ce2d4e89b77b938f945304405"}, + {file = "clickhouse_driver-0.2.7-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:969739279f4010e7b5b6b2c9d2ab56a463aed11fdaed5e02424c1b3915f144f8"}, + {file = "clickhouse_driver-0.2.7-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ed34b60f741eeb02407ea72180d77cbfc368c1be6fc2f2ff8319d1856ce67e10"}, + {file = "clickhouse_driver-0.2.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a667b48927f4420eb8c03fa33369edfbdf359a788897a01ac945263a2a611461"}, + {file = "clickhouse_driver-0.2.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f93aa3a90f3847872d7464ec9076482b2e812c4e7d61682daedffdf3471be00"}, + {file = "clickhouse_driver-0.2.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:190890667215691fdf2155c3b233b39146054ab1cd854c7d91221e6ed633d71e"}, + {file = "clickhouse_driver-0.2.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ff280aeac5e96c764cd31ba1077c95601337b9a97fb0b9ed4d24c64431f2c322"}, + {file = "clickhouse_driver-0.2.7-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:01e63e35d2ab55b8eb48facf6e951968c80d27ee6703aa6c91c73d9d0a4d0efe"}, + {file = "clickhouse_driver-0.2.7-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:a29fb24b910dafc8c11ba882797d13ec0323a97dce80a57673116fa893d1b669"}, + {file = "clickhouse_driver-0.2.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5f229a7853fc767e63143ea69889d49f6fd5623adc2f7b0f7eb360117d7e91a5"}, + {file = "clickhouse_driver-0.2.7-cp39-cp39-win32.whl", hash = "sha256:b7f34ad2ed509f48f8ed1f9b96e89765173a7b35d286c7350aa85934a11c0f49"}, + {file = "clickhouse_driver-0.2.7-cp39-cp39-win_amd64.whl", hash = "sha256:78b166597afbe490cc0cdac44fed8c8b81668f87125601dda17b154f237eef5d"}, + {file = "clickhouse_driver-0.2.7-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:16ab64beb8d079cb9b3200539539a35168f524eedf890c9acefb719e25bdc96e"}, + {file = "clickhouse_driver-0.2.7-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03e28fd50fc7c54874bf8e638a2ea87f73ae35bfbbf90123fdb395f38d62f159"}, + {file = "clickhouse_driver-0.2.7-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0677b8350acd8d186b6acd0026b62dd262d6fee428a5fa3ad9561908d4b02c39"}, + {file = "clickhouse_driver-0.2.7-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a2f3c9e2182809131701bb28a606dec90525c7ab20490714714a4b3eb015454b"}, + {file = "clickhouse_driver-0.2.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:e03a1a1b30cc58c9bd2cbe25bf5e40b1f1d16d52d44ddefb3af50435d1ed613c"}, + {file = "clickhouse_driver-0.2.7-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a1be8081306a4beb12444ed8e3208e1eb6c01ed207c471b33009c13504c88139"}, + {file = "clickhouse_driver-0.2.7-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:933b40722cbca9b1123a5bb2fb4bafafd234deae0f3481125cb6b6fa1d39aa84"}, + {file = "clickhouse_driver-0.2.7-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3054b5022f9bf15a5f4663a7cd190f466e70a2d7b8d45429d8742c515b556c10"}, + {file = "clickhouse_driver-0.2.7-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:61744760ee046c9a268cb801ca21bfe44c4873db9901a7cd0f3ca8830205feff"}, + {file = "clickhouse_driver-0.2.7-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:5e28427e05a72e7a4c3672e36703a2d80107ee0b3ab537e3380d726c96b07821"}, + {file = "clickhouse_driver-0.2.7-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c483f5ec836ae87803478f2a7b9daf15343078edd6a8be7364dd9db64905bbd0"}, + {file = "clickhouse_driver-0.2.7-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28220b794874e68bc2f06dbfff5748f1c5a3236922f59e127abd58d44ae20a3f"}, + {file = "clickhouse_driver-0.2.7-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c09877b59b34d5b3043ad70ec31543173cac8b64b4a8afaa89416b22fb28da5"}, + {file = "clickhouse_driver-0.2.7-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3580f78db27119f7380627873214ae1342066f1ecb35700c1d7bf418dd70ae73"}, + {file = 
"clickhouse_driver-0.2.7-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:0842ac1b2f7a9ca46dac2027849b241bccd8eb8ff1c59cb0a5874042b267b733"}, + {file = "clickhouse_driver-0.2.7-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7a3fb585e2d3514196258a4a3b0267510c03477f3c2380239ade4c056ba689a7"}, + {file = "clickhouse_driver-0.2.7-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48ea25287566d45efbaee0857ad25e8b33ffd7fd73e89424d79fe7f532962915"}, + {file = "clickhouse_driver-0.2.7-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee4a4935667b59b4816a5ca77300f5dbe5a7416860551d17376426b8fefc1175"}, + {file = "clickhouse_driver-0.2.7-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:358058cfceea9b43c4af9de81842563746f16984b34525a15b41eacf8fc2bed2"}, + {file = "clickhouse_driver-0.2.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:ae760fb843dec0b5c398536ca8dfaf243f494ba8fc68132ae1bd62004b0c396a"}, +] + +[package.dependencies] +pytz = "*" +tzlocal = "*" + +[package.extras] +lz4 = ["clickhouse-cityhash (>=1.0.2.1)", "lz4", "lz4 (<=3.0.1)"] +numpy = ["numpy (>=1.12.0)", "pandas (>=0.24.0)"] +zstd = ["clickhouse-cityhash (>=1.0.2.1)", "zstd"] + [[package]] name = "colorama" version = "0.4.6" @@ -3465,56 +3676,6 @@ files = [ {file = "google_re2-1.1-3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d140c7b9395b4d1e654127aa1c99bcc603ed01000b7bc7e28c52562f1894ec12"}, {file = "google_re2-1.1-3-cp39-cp39-win32.whl", hash = "sha256:80c5fc200f64b2d903eeb07b8d6cefc620a872a0240c7caaa9aca05b20f5568f"}, {file = "google_re2-1.1-3-cp39-cp39-win_amd64.whl", hash = "sha256:9eb6dbcee9b5dc4069bbc0634f2eb039ca524a14bed5868fdf6560aaafcbca06"}, - {file = "google_re2-1.1-4-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:0db114d7e1aa96dbcea452a40136d7d747d60cbb61394965774688ef59cccd4e"}, - {file = "google_re2-1.1-4-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:82133958e003a1344e5b7a791b9a9dd7560b5c8f96936dbe16f294604524a633"}, - {file = "google_re2-1.1-4-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:9e74fd441d1f3d917d3303e319f61b82cdbd96b9a5ba919377a6eef1504a1e2b"}, - {file = "google_re2-1.1-4-cp310-cp310-macosx_13_0_x86_64.whl", hash = "sha256:734a2e7a4541c57253b5ebee24f3f3366ba3658bcad01da25fb623c78723471a"}, - {file = "google_re2-1.1-4-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:d88d5eecbc908abe16132456fae13690d0508f3ac5777f320ef95cb6cab9a961"}, - {file = "google_re2-1.1-4-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:b91db80b171ecec435a07977a227757dd487356701a32f556fa6fca5d0a40522"}, - {file = "google_re2-1.1-4-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b23129887a64bb9948af14c84705273ed1a40054e99433b4acccab4dcf6a226"}, - {file = "google_re2-1.1-4-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5dc1a0cc7cd19261dcaf76763e2499305dbb7e51dc69555167cdb8af98782698"}, - {file = "google_re2-1.1-4-cp310-cp310-win32.whl", hash = "sha256:3b2ab1e2420b5dd9743a2d6bc61b64e5f708563702a75b6db86637837eaeaf2f"}, - {file = "google_re2-1.1-4-cp310-cp310-win_amd64.whl", hash = "sha256:92efca1a7ef83b6df012d432a1cbc71d10ff42200640c0f9a5ff5b343a48e633"}, - {file = "google_re2-1.1-4-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:854818fd4ce79787aca5ba459d6e5abe4ca9be2c684a5b06a7f1757452ca3708"}, - {file = "google_re2-1.1-4-cp311-cp311-macosx_12_0_x86_64.whl", hash = 
"sha256:4ceef51174b6f653b6659a8fdaa9c38960c5228b44b25be2a3bcd8566827554f"}, - {file = "google_re2-1.1-4-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:ee49087c3db7e6f5238105ab5299c09e9b77516fe8cfb0a37e5f1e813d76ecb8"}, - {file = "google_re2-1.1-4-cp311-cp311-macosx_13_0_x86_64.whl", hash = "sha256:dc2312854bdc01410acc5d935f1906a49cb1f28980341c20a68797ad89d8e178"}, - {file = "google_re2-1.1-4-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:0dc0d2e42296fa84a3cb3e1bd667c6969389cd5cdf0786e6b1f911ae2d75375b"}, - {file = "google_re2-1.1-4-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:6bf04ced98453b035f84320f348f67578024f44d2997498def149054eb860ae8"}, - {file = "google_re2-1.1-4-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1d6b6ef11dc4ab322fa66c2f3561925f2b5372a879c3ed764d20e939e2fd3e5f"}, - {file = "google_re2-1.1-4-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0dcde6646fa9a97fd3692b3f6ae7daf7f3277d7500b6c253badeefa11db8956a"}, - {file = "google_re2-1.1-4-cp311-cp311-win32.whl", hash = "sha256:5f4f0229deb057348893574d5b0a96d055abebac6debf29d95b0c0e26524c9f6"}, - {file = "google_re2-1.1-4-cp311-cp311-win_amd64.whl", hash = "sha256:4713ddbe48a18875270b36a462b0eada5e84d6826f8df7edd328d8706b6f9d07"}, - {file = "google_re2-1.1-4-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:40a698300b8faddbb325662973f839489c89b960087060bd389c376828978a04"}, - {file = "google_re2-1.1-4-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:103d2d7ac92ba23911a151fd1fc7035cbf6dc92a7f6aea92270ebceb5cd5acd3"}, - {file = "google_re2-1.1-4-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:51fb7182bccab05e8258a2b6a63dda1a6b4a9e8dfb9b03ec50e50c49c2827dd4"}, - {file = "google_re2-1.1-4-cp312-cp312-macosx_13_0_x86_64.whl", hash = "sha256:65383022abd63d7b620221eba7935132b53244b8b463d8fdce498c93cf58b7b7"}, - {file = "google_re2-1.1-4-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:396281fc68a9337157b3ffcd9392c6b7fcb8aab43e5bdab496262a81d56a4ecc"}, - {file = "google_re2-1.1-4-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:8198adcfcff1c680e052044124621730fc48d08005f90a75487f5651f1ebfce2"}, - {file = "google_re2-1.1-4-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:81f7bff07c448aec4db9ca453d2126ece8710dbd9278b8bb09642045d3402a96"}, - {file = "google_re2-1.1-4-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7dacf730fd7d6ec71b11d6404b0b26e230814bfc8e9bb0d3f13bec9b5531f8d"}, - {file = "google_re2-1.1-4-cp312-cp312-win32.whl", hash = "sha256:8c764f62f4b1d89d1ef264853b6dd9fee14a89e9b86a81bc2157fe3531425eb4"}, - {file = "google_re2-1.1-4-cp312-cp312-win_amd64.whl", hash = "sha256:0be2666df4bc5381a5d693585f9bbfefb0bfd3c07530d7e403f181f5de47254a"}, - {file = "google_re2-1.1-4-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:5cb1b63a0bfd8dd65d39d2f3b2e5ae0a06ce4b2ce5818a1d1fc78a786a252673"}, - {file = "google_re2-1.1-4-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:e41751ce6b67a95230edd0772226dc94c2952a2909674cd69df9804ed0125307"}, - {file = "google_re2-1.1-4-cp38-cp38-macosx_13_0_arm64.whl", hash = "sha256:b998cfa2d50bf4c063e777c999a7e8645ec7e5d7baf43ad71b1e2e10bb0300c3"}, - {file = "google_re2-1.1-4-cp38-cp38-macosx_13_0_x86_64.whl", hash = "sha256:226ca3b0c2e970f3fc82001ac89e845ecc7a4bb7c68583e7a76cda70b61251a7"}, - {file = "google_re2-1.1-4-cp38-cp38-macosx_14_0_arm64.whl", hash = "sha256:9adec1f734ebad7c72e56c85f205a281d8fe9bf6583bc21020157d3f2812ce89"}, - {file = 
"google_re2-1.1-4-cp38-cp38-macosx_14_0_x86_64.whl", hash = "sha256:9c34f3c64ba566af967d29e11299560e6fdfacd8ca695120a7062b6ed993b179"}, - {file = "google_re2-1.1-4-cp38-cp38-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e1b85385fe293838e0d0b6e19e6c48ba8c6f739ea92ce2e23b718afe7b343363"}, - {file = "google_re2-1.1-4-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4694daa8a8987cfb568847aa872f9990e930c91a68c892ead876411d4b9012c3"}, - {file = "google_re2-1.1-4-cp38-cp38-win32.whl", hash = "sha256:5e671e9be1668187e2995aac378de574fa40df70bb6f04657af4d30a79274ce0"}, - {file = "google_re2-1.1-4-cp38-cp38-win_amd64.whl", hash = "sha256:f66c164d6049a8299f6dfcfa52d1580576b4b9724d6fcdad2f36f8f5da9304b6"}, - {file = "google_re2-1.1-4-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:25cb17ae0993a48c70596f3a3ef5d659638106401cc8193f51c0d7961b3b3eb7"}, - {file = "google_re2-1.1-4-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:5f101f86d14ca94ca4dcf63cceaa73d351f2be2481fcaa29d9e68eeab0dc2a88"}, - {file = "google_re2-1.1-4-cp39-cp39-macosx_13_0_arm64.whl", hash = "sha256:4e82591e85bf262a6d74cff152867e05fc97867c68ba81d6836ff8b0e7e62365"}, - {file = "google_re2-1.1-4-cp39-cp39-macosx_13_0_x86_64.whl", hash = "sha256:1f61c09b93ffd34b1e2557e5a9565039f935407a5786dbad46f64f1a484166e6"}, - {file = "google_re2-1.1-4-cp39-cp39-macosx_14_0_arm64.whl", hash = "sha256:12b390ad8c7e74bab068732f774e75e0680dade6469b249a721f3432f90edfc3"}, - {file = "google_re2-1.1-4-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:1284343eb31c2e82ed2d8159f33ba6842238a56782c881b07845a6d85613b055"}, - {file = "google_re2-1.1-4-cp39-cp39-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6c7b38e0daf2c06e4d3163f4c732ab3ad2521aecfed6605b69e4482c612da303"}, - {file = "google_re2-1.1-4-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1f4d4f0823e8b2f6952a145295b1ff25245ce9bb136aff6fe86452e507d4c1dd"}, - {file = "google_re2-1.1-4-cp39-cp39-win32.whl", hash = "sha256:1afae56b2a07bb48cfcfefaa15ed85bae26a68f5dc7f9e128e6e6ea36914e847"}, - {file = "google_re2-1.1-4-cp39-cp39-win_amd64.whl", hash = "sha256:aa7d6d05911ab9c8adbf3c225a7a120ab50fd2784ac48f2f0d140c0b7afc2b55"}, ] [[package]] @@ -7040,7 +7201,6 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, @@ -8567,6 +8727,24 @@ files = [ {file = "tzdata-2023.3.tar.gz", hash = "sha256:11ef1e08e54acb0d4f95bdb1be05da659673de4acbd21bf9c69e94cc5e907a3a"}, ] +[[package]] +name = "tzlocal" +version = "5.2" +description = "tzinfo object for the local 
timezone" +optional = true +python-versions = ">=3.8" +files = [ + {file = "tzlocal-5.2-py3-none-any.whl", hash = "sha256:49816ef2fe65ea8ac19d19aa7a1ae0551c834303d5014c6d5a62e4cbda8047b8"}, + {file = "tzlocal-5.2.tar.gz", hash = "sha256:8d399205578f1a9342816409cc1e46a93ebd5755e39ea2d85334bea911bf0e6e"}, +] + +[package.dependencies] +"backports.zoneinfo" = {version = "*", markers = "python_version < \"3.9\""} +tzdata = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +devenv = ["check-manifest", "pytest (>=4.3)", "pytest-cov", "pytest-mock (>=3.3)", "zest.releaser"] + [[package]] name = "uc-micro-py" version = "1.0.2" @@ -8987,11 +9165,73 @@ files = [ docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] +[[package]] +name = "zstandard" +version = "0.22.0" +description = "Zstandard bindings for Python" +optional = true +python-versions = ">=3.8" +files = [ + {file = "zstandard-0.22.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:275df437ab03f8c033b8a2c181e51716c32d831082d93ce48002a5227ec93019"}, + {file = "zstandard-0.22.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2ac9957bc6d2403c4772c890916bf181b2653640da98f32e04b96e4d6fb3252a"}, + {file = "zstandard-0.22.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe3390c538f12437b859d815040763abc728955a52ca6ff9c5d4ac707c4ad98e"}, + {file = "zstandard-0.22.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1958100b8a1cc3f27fa21071a55cb2ed32e9e5df4c3c6e661c193437f171cba2"}, + {file = "zstandard-0.22.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93e1856c8313bc688d5df069e106a4bc962eef3d13372020cc6e3ebf5e045202"}, + {file = "zstandard-0.22.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:1a90ba9a4c9c884bb876a14be2b1d216609385efb180393df40e5172e7ecf356"}, + {file = "zstandard-0.22.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3db41c5e49ef73641d5111554e1d1d3af106410a6c1fb52cf68912ba7a343a0d"}, + {file = "zstandard-0.22.0-cp310-cp310-win32.whl", hash = "sha256:d8593f8464fb64d58e8cb0b905b272d40184eac9a18d83cf8c10749c3eafcd7e"}, + {file = "zstandard-0.22.0-cp310-cp310-win_amd64.whl", hash = "sha256:f1a4b358947a65b94e2501ce3e078bbc929b039ede4679ddb0460829b12f7375"}, + {file = "zstandard-0.22.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:589402548251056878d2e7c8859286eb91bd841af117dbe4ab000e6450987e08"}, + {file = "zstandard-0.22.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a97079b955b00b732c6f280d5023e0eefe359045e8b83b08cf0333af9ec78f26"}, + {file = "zstandard-0.22.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:445b47bc32de69d990ad0f34da0e20f535914623d1e506e74d6bc5c9dc40bb09"}, + {file = "zstandard-0.22.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33591d59f4956c9812f8063eff2e2c0065bc02050837f152574069f5f9f17775"}, + {file = "zstandard-0.22.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:888196c9c8893a1e8ff5e89b8f894e7f4f0e64a5af4d8f3c410f0319128bb2f8"}, + {file = "zstandard-0.22.0-cp311-cp311-musllinux_1_1_aarch64.whl", 
hash = "sha256:53866a9d8ab363271c9e80c7c2e9441814961d47f88c9bc3b248142c32141d94"}, + {file = "zstandard-0.22.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4ac59d5d6910b220141c1737b79d4a5aa9e57466e7469a012ed42ce2d3995e88"}, + {file = "zstandard-0.22.0-cp311-cp311-win32.whl", hash = "sha256:2b11ea433db22e720758cba584c9d661077121fcf60ab43351950ded20283440"}, + {file = "zstandard-0.22.0-cp311-cp311-win_amd64.whl", hash = "sha256:11f0d1aab9516a497137b41e3d3ed4bbf7b2ee2abc79e5c8b010ad286d7464bd"}, + {file = "zstandard-0.22.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6c25b8eb733d4e741246151d895dd0308137532737f337411160ff69ca24f93a"}, + {file = "zstandard-0.22.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f9b2cde1cd1b2a10246dbc143ba49d942d14fb3d2b4bccf4618d475c65464912"}, + {file = "zstandard-0.22.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a88b7df61a292603e7cd662d92565d915796b094ffb3d206579aaebac6b85d5f"}, + {file = "zstandard-0.22.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:466e6ad8caefb589ed281c076deb6f0cd330e8bc13c5035854ffb9c2014b118c"}, + {file = "zstandard-0.22.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a1d67d0d53d2a138f9e29d8acdabe11310c185e36f0a848efa104d4e40b808e4"}, + {file = "zstandard-0.22.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:39b2853efc9403927f9065cc48c9980649462acbdf81cd4f0cb773af2fd734bc"}, + {file = "zstandard-0.22.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8a1b2effa96a5f019e72874969394edd393e2fbd6414a8208fea363a22803b45"}, + {file = "zstandard-0.22.0-cp312-cp312-win32.whl", hash = "sha256:88c5b4b47a8a138338a07fc94e2ba3b1535f69247670abfe422de4e0b344aae2"}, + {file = "zstandard-0.22.0-cp312-cp312-win_amd64.whl", hash = "sha256:de20a212ef3d00d609d0b22eb7cc798d5a69035e81839f549b538eff4105d01c"}, + {file = "zstandard-0.22.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d75f693bb4e92c335e0645e8845e553cd09dc91616412d1d4650da835b5449df"}, + {file = "zstandard-0.22.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:36a47636c3de227cd765e25a21dc5dace00539b82ddd99ee36abae38178eff9e"}, + {file = "zstandard-0.22.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68953dc84b244b053c0d5f137a21ae8287ecf51b20872eccf8eaac0302d3e3b0"}, + {file = "zstandard-0.22.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2612e9bb4977381184bb2463150336d0f7e014d6bb5d4a370f9a372d21916f69"}, + {file = "zstandard-0.22.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:23d2b3c2b8e7e5a6cb7922f7c27d73a9a615f0a5ab5d0e03dd533c477de23004"}, + {file = "zstandard-0.22.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1d43501f5f31e22baf822720d82b5547f8a08f5386a883b32584a185675c8fbf"}, + {file = "zstandard-0.22.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a493d470183ee620a3df1e6e55b3e4de8143c0ba1b16f3ded83208ea8ddfd91d"}, + {file = "zstandard-0.22.0-cp38-cp38-win32.whl", hash = "sha256:7034d381789f45576ec3f1fa0e15d741828146439228dc3f7c59856c5bcd3292"}, + {file = "zstandard-0.22.0-cp38-cp38-win_amd64.whl", hash = "sha256:d8fff0f0c1d8bc5d866762ae95bd99d53282337af1be9dc0d88506b340e74b73"}, + {file = "zstandard-0.22.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2fdd53b806786bd6112d97c1f1e7841e5e4daa06810ab4b284026a1a0e484c0b"}, + {file = "zstandard-0.22.0-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:73a1d6bd01961e9fd447162e137ed949c01bdb830dfca487c4a14e9742dccc93"}, + {file = "zstandard-0.22.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9501f36fac6b875c124243a379267d879262480bf85b1dbda61f5ad4d01b75a3"}, + {file = "zstandard-0.22.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48f260e4c7294ef275744210a4010f116048e0c95857befb7462e033f09442fe"}, + {file = "zstandard-0.22.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:959665072bd60f45c5b6b5d711f15bdefc9849dd5da9fb6c873e35f5d34d8cfb"}, + {file = "zstandard-0.22.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d22fdef58976457c65e2796e6730a3ea4a254f3ba83777ecfc8592ff8d77d303"}, + {file = "zstandard-0.22.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a7ccf5825fd71d4542c8ab28d4d482aace885f5ebe4b40faaa290eed8e095a4c"}, + {file = "zstandard-0.22.0-cp39-cp39-win32.whl", hash = "sha256:f058a77ef0ece4e210bb0450e68408d4223f728b109764676e1a13537d056bb0"}, + {file = "zstandard-0.22.0-cp39-cp39-win_amd64.whl", hash = "sha256:e9e9d4e2e336c529d4c435baad846a181e39a982f823f7e4495ec0b0ec8538d2"}, + {file = "zstandard-0.22.0.tar.gz", hash = "sha256:8226a33c542bcb54cd6bd0a366067b610b41713b64c9abec1bc4533d69f51e70"}, +] + +[package.dependencies] +cffi = {version = ">=1.11", markers = "platform_python_implementation == \"PyPy\""} + +[package.extras] +cffi = ["cffi (>=1.11)"] + [extras] athena = ["botocore", "pyarrow", "pyathena", "s3fs"] az = ["adlfs"] bigquery = ["gcsfs", "google-cloud-bigquery", "grpcio", "pyarrow"] cli = ["cron-descriptor", "pipdeptree"] +clickhouse = ["adlfs", "clickhouse-connect", "clickhouse-driver", "gcsfs", "pyarrow", "s3fs"] databricks = ["databricks-sql-connector"] dbt = ["dbt-athena-community", "dbt-bigquery", "dbt-core", "dbt-databricks", "dbt-duckdb", "dbt-redshift", "dbt-snowflake"] duckdb = ["duckdb", "duckdb"] @@ -9012,4 +9252,4 @@ weaviate = ["weaviate-client"] [metadata] lock-version = "2.0" python-versions = ">=3.8.1,<3.13" -content-hash = "e6e43e82afedfa274c91f3fd13dbbddd9cac64f386d2f5f1c4564ff6f5784cd2" +content-hash = "71329bd03e3d09294aaacc663896e47d9c22bf43ee1434bddb84beb4d7fc0269" From d6309b3561df88cdfddee3f15e169ea02d3f4e17 Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Sat, 6 Apr 2024 22:06:06 +0200 Subject: [PATCH 063/127] Revert some files back to devel Signed-off-by: Marcel Coetzee --- dlt/cli/config_toml_writer.py | 22 +++++++------- .../specs/connection_string_credentials.py | 8 ++--- dlt/common/libs/sql_alchemy.py | 29 ++++++++++--------- dlt/destinations/impl/bigquery/bigquery.py | 11 +++---- .../impl/bigquery/bigquery_adapter.py | 6 ++-- .../impl/clickhouse/clickhouse.py | 5 ++-- dlt/destinations/job_impl.py | 4 +-- 7 files changed, 44 insertions(+), 41 deletions(-) diff --git a/dlt/cli/config_toml_writer.py b/dlt/cli/config_toml_writer.py index 97a23fa0ef..8cf831d725 100644 --- a/dlt/cli/config_toml_writer.py +++ b/dlt/cli/config_toml_writer.py @@ -36,9 +36,10 @@ def generate_typed_example(name: str, hint: AnyType) -> Any: if sc_type == "complex": if issubclass(inner_hint, C_Sequence): return ["a", "b", "c"] - table = tomlkit.table(False) - table["key"] = "value" - return table + else: + table = tomlkit.table(False) + table["key"] = "value" + return table if sc_type == "timestamp": return pendulum.now().to_iso8601_string() if sc_type == "date": @@ -73,14 +74,15 @@ def write_value( write_spec(inner_table, hint(), overwrite_existing) if len(inner_table) > 0: 
toml_table[name] = inner_table - elif default_value is None: - example_value = generate_typed_example(name, hint) - toml_table[name] = example_value - # tomlkit not supporting comments on boolean - if not isinstance(example_value, bool): - toml_table[name].comment("please set me up!") else: - toml_table[name] = default_value + if default_value is None: + example_value = generate_typed_example(name, hint) + toml_table[name] = example_value + # tomlkit not supporting comments on boolean + if not isinstance(example_value, bool): + toml_table[name].comment("please set me up!") + else: + toml_table[name] = default_value def write_spec(toml_table: TOMLTable, config: BaseConfiguration, overwrite_existing: bool) -> None: diff --git a/dlt/common/configuration/specs/connection_string_credentials.py b/dlt/common/configuration/specs/connection_string_credentials.py index 21e635a07c..2691c5d886 100644 --- a/dlt/common/configuration/specs/connection_string_credentials.py +++ b/dlt/common/configuration/specs/connection_string_credentials.py @@ -29,13 +29,13 @@ def parse_native_representation(self, native_value: Any) -> None: raise InvalidConnectionString(self.__class__, native_value, self.drivername) try: url = make_url(native_value) - # Update only values that are not None. + # update only values that are not None self.update({k: v for k, v in url._asdict().items() if v is not None}) if self.query is not None: - # Query may be immutable so make it mutable. + # query may be immutable so make it mutable self.query = dict(self.query) - except Exception as e: - raise InvalidConnectionString(self.__class__, native_value, self.drivername) from e + except Exception: + raise InvalidConnectionString(self.__class__, native_value, self.drivername) def on_resolved(self) -> None: if self.password: diff --git a/dlt/common/libs/sql_alchemy.py b/dlt/common/libs/sql_alchemy.py index a8797d1cb5..2f3b51ec0d 100644 --- a/dlt/common/libs/sql_alchemy.py +++ b/dlt/common/libs/sql_alchemy.py @@ -117,18 +117,21 @@ def _assert_port(cls, port: Optional[int]) -> Optional[int]: return None try: return int(port) - except TypeError as e: - raise TypeError("Port argument must be an integer or None") from e + except TypeError: + raise TypeError("Port argument must be an integer or None") @classmethod def _assert_str(cls, v: str, paramname: str) -> str: if not isinstance(v, str): - raise TypeError(f"{paramname} must be a string") + raise TypeError("%s must be a string" % paramname) return v @classmethod def _assert_none_str(cls, v: Optional[str], paramname: str) -> Optional[str]: - return v if v is None else cls._assert_str(v, paramname) + if v is None: + return v + + return cls._assert_str(v, paramname) @classmethod def _str_dict( @@ -251,14 +254,14 @@ def update_query_pairs( new_query: Mapping[str, Union[str, Sequence[str]]] if append: - new_query = { - k: ( - tuple(to_list(existing_query[k]) + to_list(new_keys[k])) - if k in existing_query - else new_keys[k] - ) - for k in new_keys - } + new_query = {} + + for k in new_keys: + if k in existing_query: + new_query[k] = tuple(to_list(existing_query[k]) + to_list(new_keys[k])) + else: + new_query[k] = new_keys[k] + new_query.update( {k: existing_query[k] for k in set(existing_query).difference(new_keys)} ) @@ -280,7 +283,7 @@ def update_query_dict( def render_as_string(self, hide_password: bool = True) -> str: """Render this `URL` object as a string.""" - s = f"{self.drivername}://" + s = self.drivername + "://" if self.username is not None: s += quote(self.username, safe=" +") if 
self.password is not None: diff --git a/dlt/destinations/impl/bigquery/bigquery.py b/dlt/destinations/impl/bigquery/bigquery.py index 279917d3a0..b2e53f9734 100644 --- a/dlt/destinations/impl/bigquery/bigquery.py +++ b/dlt/destinations/impl/bigquery/bigquery.py @@ -232,10 +232,9 @@ def start_file_load(self, table: TTableSchema, file_path: str, load_id: str) -> if insert_api == "streaming": if table["write_disposition"] != "append": raise DestinationTerminalException( - ( - "BigQuery streaming insert can only be used with `append` write_disposition, while " - f'the given resource has `{table["write_disposition"]}`.' - ) + "BigQuery streaming insert can only be used with `append`" + " write_disposition, while the given resource has" + f" `{table['write_disposition']}`." ) if file_path.endswith(".jsonl"): job_cls = DestinationJsonlLoadJob @@ -364,7 +363,9 @@ def prepare_load_table( def _get_column_def_sql(self, column: TColumnSchema, table_format: TTableFormat = None) -> str: name = self.capabilities.escape_identifier(column["name"]) - column_def_sql = f"{name} {self.type_mapper.to_db_type(column, table_format)} {self._gen_not_null(column.get('nullable', True))}" + column_def_sql = ( + f"{name} {self.type_mapper.to_db_type(column, table_format)} {self._gen_not_null(column.get('nullable', True))}" + ) if column.get(ROUND_HALF_EVEN_HINT, False): column_def_sql += " OPTIONS (rounding_mode='ROUND_HALF_EVEN')" if column.get(ROUND_HALF_AWAY_FROM_ZERO_HINT, False): diff --git a/dlt/destinations/impl/bigquery/bigquery_adapter.py b/dlt/destinations/impl/bigquery/bigquery_adapter.py index 8943b0da79..6b3ef32b0f 100644 --- a/dlt/destinations/impl/bigquery/bigquery_adapter.py +++ b/dlt/destinations/impl/bigquery/bigquery_adapter.py @@ -153,10 +153,8 @@ def bigquery_adapter( if insert_api is not None: if insert_api == "streaming" and data.write_disposition != "append": raise ValueError( - ( - "BigQuery streaming insert can only be used with `append` write_disposition, while " - f"the given resource has `{data.write_disposition}`." - ) + "BigQuery streaming insert can only be used with `append` write_disposition, while " + f"the given resource has `{data.write_disposition}`." 
) additional_table_hints |= {"x-insert-api": insert_api} # type: ignore[operator] diff --git a/dlt/destinations/impl/clickhouse/clickhouse.py b/dlt/destinations/impl/clickhouse/clickhouse.py index 362e26a4d1..ca9dc49e66 100644 --- a/dlt/destinations/impl/clickhouse/clickhouse.py +++ b/dlt/destinations/impl/clickhouse/clickhouse.py @@ -53,9 +53,9 @@ FILE_FORMAT_TO_TABLE_FUNCTION_MAPPING, SUPPORTED_FILE_FORMATS, ) -from dlt.destinations.insert_job_client import InsertValuesJobClient from dlt.destinations.job_client_impl import ( SqlJobClientBase, + SqlJobClientWithStaging, ) from dlt.destinations.job_impl import NewReferenceJob, EmptyLoadJob from dlt.destinations.sql_jobs import SqlMergeJob @@ -187,7 +187,6 @@ def __init__( clickhouse_format: str = FILE_FORMAT_TO_TABLE_FUNCTION_MAPPING[file_extension] # compression = "none" if config.get("data_writer.disable_compression") else "gz" - table_function: str = "" statement: str = "" if bucket_scheme in ("s3", "gs", "gcs"): @@ -445,7 +444,7 @@ def gen_merge_sql( return sql -class ClickhouseClient(InsertValuesJobClient, SupportsStagingDestination): +class ClickhouseClient(SqlJobClientWithStaging, SupportsStagingDestination): capabilities: ClassVar[DestinationCapabilitiesContext] = capabilities() def __init__( diff --git a/dlt/destinations/job_impl.py b/dlt/destinations/job_impl.py index b5238ea4ba..8e017fc791 100644 --- a/dlt/destinations/job_impl.py +++ b/dlt/destinations/job_impl.py @@ -56,7 +56,7 @@ class NewReferenceJob(NewLoadJobImpl): def __init__( self, file_name: str, status: TLoadJobState, exception: str = None, remote_path: str = None ) -> None: - file_name = f"{os.path.splitext(file_name)[0]}.reference" + file_name = os.path.splitext(file_name)[0] + ".reference" super().__init__(file_name, status, exception) self._remote_path = remote_path self._save_text_file(remote_path) @@ -68,7 +68,7 @@ def is_reference_job(file_path: str) -> bool: @staticmethod def resolve_reference(file_path: str) -> str: with open(file_path, "r+", encoding="utf-8") as f: - # Reading from a file. 
+ # Reading from a file return f.read() From dfc274ddfc8af233faa2aa149e5f6937c3e30fa9 Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Sat, 6 Apr 2024 22:45:41 +0200 Subject: [PATCH 064/127] Remove redundant merge logic #1055 Signed-off-by: Marcel Coetzee --- .../impl/clickhouse/clickhouse.py | 156 +----------------- 1 file changed, 1 insertion(+), 155 deletions(-) diff --git a/dlt/destinations/impl/clickhouse/clickhouse.py b/dlt/destinations/impl/clickhouse/clickhouse.py index ca9dc49e66..2bdfb82a23 100644 --- a/dlt/destinations/impl/clickhouse/clickhouse.py +++ b/dlt/destinations/impl/clickhouse/clickhouse.py @@ -32,13 +32,8 @@ TTableSchemaColumns, TColumnSchemaBase, ) -from dlt.common.schema.utils import ( - get_columns_names_with_prop, - get_first_column_name_with_prop, - get_dedup_sort_tuple, -) from dlt.common.storages import FileStorage -from dlt.destinations.exceptions import MergeDispositionException, LoadJobTerminalException +from dlt.destinations.exceptions import LoadJobTerminalException from dlt.destinations.impl.clickhouse import capabilities from dlt.destinations.impl.clickhouse.clickhouse_adapter import ( TTableEngineType, @@ -294,155 +289,6 @@ def _to_temp_table(cls, select_sql: str, temp_table_name: str) -> str: # return f"CREATE TABLE {temp_table_name} ENGINE = Memory AS {select_sql};" return f"CREATE TABLE {temp_table_name} ENGINE = Memory AS {select_sql};" - @classmethod - def gen_merge_sql( - cls, table_chain: Sequence[TTableSchema], sql_client: ClickhouseSqlClient # type: ignore[override] - ) -> List[str]: - sql: List[str] = [] - root_table = table_chain[0] - - escape_id = sql_client.capabilities.escape_identifier - escape_lit = sql_client.capabilities.escape_literal - if escape_id is None: - escape_id = DestinationCapabilitiesContext.generic_capabilities().escape_identifier - if escape_lit is None: - escape_lit = DestinationCapabilitiesContext.generic_capabilities().escape_literal - - root_table_name = sql_client.make_qualified_table_name(root_table["name"]) - with sql_client.with_staging_dataset(staging=True): - staging_root_table_name = sql_client.make_qualified_table_name(root_table["name"]) - primary_keys = list( - map( - escape_id, - get_columns_names_with_prop(root_table, "primary_key"), - ) - ) - merge_keys = list( - map( - escape_id, - get_columns_names_with_prop(root_table, "merge_key"), - ) - ) - key_clauses = cls._gen_key_table_clauses(primary_keys, merge_keys) - - unique_column: str = None - root_key_column: str = None - - if len(table_chain) == 1: - key_table_clauses = cls.gen_key_table_clauses( - root_table_name, staging_root_table_name, key_clauses, for_delete=True - ) - sql.extend(f"DELETE {clause};" for clause in key_table_clauses) - else: - key_table_clauses = cls.gen_key_table_clauses( - root_table_name, staging_root_table_name, key_clauses, for_delete=False - ) - unique_columns = get_columns_names_with_prop(root_table, "unique") - if not unique_columns: - raise MergeDispositionException( - sql_client.fully_qualified_dataset_name(), - staging_root_table_name, - [t["name"] for t in table_chain], - f"There is no unique column (ie _dlt_id) in top table {root_table['name']} so" - " it is not possible to link child tables to it.", - ) - unique_column = escape_id(unique_columns[0]) - create_delete_temp_table_sql, delete_temp_table_name = cls.gen_delete_temp_table_sql( - unique_column, key_table_clauses - ) - sql.extend(create_delete_temp_table_sql) - - for table in table_chain[1:]: - table_name = sql_client.make_qualified_table_name(table["name"]) - 
root_key_columns = get_columns_names_with_prop(table, "root_key") - if not root_key_columns: - raise MergeDispositionException( - sql_client.fully_qualified_dataset_name(), - staging_root_table_name, - [t["name"] for t in table_chain], - "There is no root foreign key (ie _dlt_root_id) in child table" - f" {table['name']} so it is not possible to refer to top level table" - f" {root_table['name']} unique column {unique_column}", - ) - root_key_column = escape_id(root_key_columns[0]) - sql.append( - cls.gen_delete_from_sql( - table_name, root_key_column, delete_temp_table_name, unique_column - ) - ) - - sql.append( - cls.gen_delete_from_sql( - root_table_name, unique_column, delete_temp_table_name, unique_column - ) - ) - - not_deleted_cond: str = None - hard_delete_col = get_first_column_name_with_prop(root_table, "hard_delete") - if hard_delete_col is not None: - not_deleted_cond = f"{escape_id(hard_delete_col)} IS NULL" - if root_table["columns"][hard_delete_col]["data_type"] == "bool": - not_deleted_cond += f" OR {escape_id(hard_delete_col)} = {escape_lit(False)}" - - dedup_sort = get_dedup_sort_tuple(root_table) - - insert_temp_table_name: str = None - if len(table_chain) > 1 and (primary_keys or hard_delete_col is not None): - condition_columns = [hard_delete_col] if not_deleted_cond is not None else None - ( - create_insert_temp_table_sql, - insert_temp_table_name, - ) = cls.gen_insert_temp_table_sql( - staging_root_table_name, - primary_keys, - unique_column, - dedup_sort, - not_deleted_cond, - condition_columns, - ) - sql.extend(create_insert_temp_table_sql) - - to_delete: List[str] = [] - - for table in table_chain: - table_name = sql_client.make_qualified_table_name(table["name"]) - with sql_client.with_staging_dataset(staging=True): - staging_table_name = sql_client.make_qualified_table_name(table["name"]) - - insert_cond = not_deleted_cond if hard_delete_col is not None else "1 = 1" - if ( - primary_keys - and len(table_chain) > 1 - or not primary_keys - and table.get("parent") is not None - and hard_delete_col is not None - ): - uniq_column = unique_column if table.get("parent") is None else root_key_column - insert_cond = f"{uniq_column} IN (SELECT * FROM {insert_temp_table_name})" - - columns = list(map(escape_id, get_columns_names_with_prop(table, "name"))) - col_str = ", ".join(columns) - select_sql = f"SELECT {col_str} FROM {staging_table_name} WHERE {insert_cond}" - if primary_keys and len(table_chain) == 1: - select_sql = cls.gen_select_from_dedup_sql( - staging_table_name, primary_keys, columns, dedup_sort, insert_cond - ) - - sql.extend([f"INSERT INTO {table_name}({col_str}) {select_sql};"]) - - if table_name is not None and table_name.startswith("delete_"): - to_delete.extend([table_name]) - if insert_temp_table_name is not None and insert_temp_table_name.startswith("delete_"): - to_delete.extend([insert_temp_table_name]) - - # TODO: Doesn't remove all `delete_` tables. 
- for delete_table_name in to_delete: - sql.extend( - [f"DROP TABLE IF EXISTS {sql_client.make_qualified_table_name(delete_table_name)};"] - ) - - return sql - class ClickhouseClient(SqlJobClientWithStaging, SupportsStagingDestination): capabilities: ClassVar[DestinationCapabilitiesContext] = capabilities() From 9d0e1ba4eb7bc8e9b7fb007db7ef35ede7fd273e Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Sat, 6 Apr 2024 23:30:26 +0200 Subject: [PATCH 065/127] Spelling fix Signed-off-by: Marcel Coetzee --- dlt/common/configuration/specs/base_configuration.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dlt/common/configuration/specs/base_configuration.py b/dlt/common/configuration/specs/base_configuration.py index 06fb97fcdd..1329feae6c 100644 --- a/dlt/common/configuration/specs/base_configuration.py +++ b/dlt/common/configuration/specs/base_configuration.py @@ -197,7 +197,7 @@ def default_factory(att_value=att_value): # type: ignore[no-untyped-def] synth_init = init and ((not base_params or base_params.init) and has_default_init) if synth_init != init and has_default_init: warnings.warn( - f"__init__ method will not be generated on {cls.__name__} because bas class didn't" + f"__init__ method will not be generated on {cls.__name__} because base class didn't" " synthesize __init__. Please correct `init` flag in confispec decorator. You are" " probably receiving incorrect __init__ signature for type checking" ) From 9f41eddb2e25033027728af5ea8d418791a6bdfd Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Sat, 6 Apr 2024 23:37:50 +0200 Subject: [PATCH 066/127] Don't synthesise CH credentials __init__. #1055 Signed-off-by: Marcel Coetzee --- dlt/destinations/impl/clickhouse/configuration.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dlt/destinations/impl/clickhouse/configuration.py b/dlt/destinations/impl/clickhouse/configuration.py index f35d6f944f..c600fcb055 100644 --- a/dlt/destinations/impl/clickhouse/configuration.py +++ b/dlt/destinations/impl/clickhouse/configuration.py @@ -13,7 +13,7 @@ TSecureConnection = Literal[0, 1] -@configspec +@configspec(init=False) class ClickhouseCredentials(ConnectionStringCredentials): drivername: str = "clickhouse" host: str # type: ignore From 584ebe78569ea4457c494b48683f0a4bda95ec38 Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Sun, 7 Apr 2024 00:11:31 +0200 Subject: [PATCH 067/127] Optimize import and type hinting in Clickhouse factory #1055 Signed-off-by: Marcel Coetzee --- dlt/destinations/impl/clickhouse/factory.py | 9 ++++++--- tests/load/clickhouse/test_clickhouse_configuration.py | 2 +- tests/load/clickhouse/test_clickhouse_table_builder.py | 6 +++--- 3 files changed, 10 insertions(+), 7 deletions(-) diff --git a/dlt/destinations/impl/clickhouse/factory.py b/dlt/destinations/impl/clickhouse/factory.py index 2242d30565..a95d4d450d 100644 --- a/dlt/destinations/impl/clickhouse/factory.py +++ b/dlt/destinations/impl/clickhouse/factory.py @@ -4,22 +4,25 @@ from dlt.common.destination import Destination, DestinationCapabilitiesContext from dlt.destinations.impl.clickhouse import capabilities -from dlt.destinations.impl.clickhouse.clickhouse import ClickhouseClient from dlt.destinations.impl.clickhouse.configuration import ( ClickhouseClientConfiguration, ClickhouseCredentials, ) +if t.TYPE_CHECKING: + from dlt.destinations.impl.clickhouse.clickhouse import ClickhouseClient + + # noinspection PyPep8Naming -class clickhouse(Destination[ClickhouseClientConfiguration, ClickhouseClient]): +class 
clickhouse(Destination[ClickhouseClientConfiguration, "ClickhouseClient"]): spec = ClickhouseClientConfiguration def capabilities(self) -> DestinationCapabilitiesContext: return capabilities() @property - def client_class(self) -> t.Type[ClickhouseClient]: + def client_class(self) -> t.Type["ClickhouseClient"]: from dlt.destinations.impl.clickhouse.clickhouse import ClickhouseClient return ClickhouseClient diff --git a/tests/load/clickhouse/test_clickhouse_configuration.py b/tests/load/clickhouse/test_clickhouse_configuration.py index 1b6fab2f28..f2248328c5 100644 --- a/tests/load/clickhouse/test_clickhouse_configuration.py +++ b/tests/load/clickhouse/test_clickhouse_configuration.py @@ -20,7 +20,7 @@ def test_clickhouse_connection_string_with_all_params() -> None: url = "clickhouse://user1:pass1@host1:9000/testdb?secure=0&connect_timeout=230&send_receive_timeout=1000" - creds = ClickhouseCredentials() # type: ignore + creds = ClickhouseCredentials() creds.parse_native_representation(url) assert creds.database == "testdb" diff --git a/tests/load/clickhouse/test_clickhouse_table_builder.py b/tests/load/clickhouse/test_clickhouse_table_builder.py index 0d1ba2f334..12a814de48 100644 --- a/tests/load/clickhouse/test_clickhouse_table_builder.py +++ b/tests/load/clickhouse/test_clickhouse_table_builder.py @@ -17,7 +17,7 @@ @pytest.fixture def clickhouse_client(empty_schema: Schema) -> ClickhouseClient: # Return a client without opening connection. - creds = ClickhouseCredentials() # type: ignore + creds = ClickhouseCredentials() return ClickhouseClient( empty_schema, ClickhouseClientConfiguration(dataset_name=f"test_{uniq_id()}", credentials=creds), @@ -34,7 +34,7 @@ def test_clickhouse_configuration() -> None: "DESTINATION__CLICKHOUSE__CREDENTIALS__PASSWORD": "fuss_do_rah", } ): - C = resolve_configuration(ClickhouseCredentials(), sections=("destination", "clickhouse")) # type: ignore + C = resolve_configuration(ClickhouseCredentials(), sections=("destination", "clickhouse")) assert C.database == "mydb" assert C.password == "fuss_do_rah" @@ -42,7 +42,7 @@ def test_clickhouse_configuration() -> None: assert ClickhouseClientConfiguration().fingerprint() == "" # Based on host. c = resolve_configuration( - ClickhouseCredentials(), # type: ignore + ClickhouseCredentials(), explicit_value="clickhouse://user1:pass@host1/db1", ) assert ClickhouseClientConfiguration(credentials=c).fingerprint() == digest128("host1") From 32d3f6225cc3f0352ad36e2be3270fa5d6d657a9 Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Mon, 8 Apr 2024 16:04:04 +0200 Subject: [PATCH 068/127] Revert back to temp table Signed-off-by: Marcel Coetzee --- dlt/destinations/impl/clickhouse/clickhouse.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/dlt/destinations/impl/clickhouse/clickhouse.py b/dlt/destinations/impl/clickhouse/clickhouse.py index 2bdfb82a23..c5061b1911 100644 --- a/dlt/destinations/impl/clickhouse/clickhouse.py +++ b/dlt/destinations/impl/clickhouse/clickhouse.py @@ -283,11 +283,7 @@ def exception(self) -> str: class ClickhouseMergeJob(SqlMergeJob): @classmethod def _to_temp_table(cls, select_sql: str, temp_table_name: str) -> str: - # Different sessions are created during the load process, and temporary tables - # do not persist between sessions. - # Resorting to persisted in-memory table to fix. 
- # return f"CREATE TABLE {temp_table_name} ENGINE = Memory AS {select_sql};" - return f"CREATE TABLE {temp_table_name} ENGINE = Memory AS {select_sql};" + return f"CREATE TEMPORARY TABLE {temp_table_name} AS {select_sql};" class ClickhouseClient(SqlJobClientWithStaging, SupportsStagingDestination): From 6cb4ee85c182c453659aba51eec1cc24d575bd43 Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Mon, 8 Apr 2024 16:06:57 +0200 Subject: [PATCH 069/127] Refactor Clickhouse to ClickHouse for consistency Signed-off-by: Marcel Coetzee --- dlt/destinations/impl/clickhouse/__init__.py | 6 +-- .../impl/clickhouse/clickhouse.py | 44 +++++++++---------- .../impl/clickhouse/clickhouse_adapter.py | 2 +- .../impl/clickhouse/configuration.py | 8 ++-- dlt/destinations/impl/clickhouse/factory.py | 22 +++++----- .../impl/clickhouse/sql_client.py | 18 ++++---- .../test_clickhouse_configuration.py | 8 ++-- .../test_clickhouse_table_builder.py | 30 ++++++------- tests/load/test_job_client.py | 4 +- 9 files changed, 71 insertions(+), 71 deletions(-) diff --git a/dlt/destinations/impl/clickhouse/__init__.py b/dlt/destinations/impl/clickhouse/__init__.py index 6136e0078d..3f30229eb3 100644 --- a/dlt/destinations/impl/clickhouse/__init__.py +++ b/dlt/destinations/impl/clickhouse/__init__.py @@ -7,7 +7,7 @@ def capabilities() -> DestinationCapabilitiesContext: caps = DestinationCapabilitiesContext() - # Clickhouse only supports loading from staged files on s3 for now. + # ClickHouse only supports loading from staged files on s3 for now. caps.preferred_loader_file_format = "jsonl" caps.supported_loader_file_formats = ["parquet", "jsonl"] caps.preferred_staging_file_format = "jsonl" @@ -20,7 +20,7 @@ def capabilities() -> DestinationCapabilitiesContext: caps.max_identifier_length = 255 caps.max_column_identifier_length = 255 - # Clickhouse has no max `String` type length. + # ClickHouse has no max `String` type length. caps.max_text_data_type_length = sys.maxsize caps.schema_supports_numeric_precision = True @@ -34,7 +34,7 @@ def capabilities() -> DestinationCapabilitiesContext: caps.is_max_query_length_in_bytes = True caps.max_query_length = 262144 - # Clickhouse has limited support for transactional semantics, especially for `ReplicatedMergeTree`, + # ClickHouse has limited support for transactional semantics, especially for `ReplicatedMergeTree`, # the default ClickHouse Cloud engine. It does, however, provide atomicity for individual DDL operations like `ALTER TABLE`. 
# https://clickhouse-driver.readthedocs.io/en/latest/dbapi.html#clickhouse_driver.dbapi.connection.Connection.commit # https://clickhouse.com/docs/en/guides/developer/transactional#transactions-commit-and-rollback diff --git a/dlt/destinations/impl/clickhouse/clickhouse.py b/dlt/destinations/impl/clickhouse/clickhouse.py index c5061b1911..20304bf284 100644 --- a/dlt/destinations/impl/clickhouse/clickhouse.py +++ b/dlt/destinations/impl/clickhouse/clickhouse.py @@ -40,9 +40,9 @@ TABLE_ENGINE_TYPE_HINT, ) from dlt.destinations.impl.clickhouse.configuration import ( - ClickhouseClientConfiguration, + ClickHouseClientConfiguration, ) -from dlt.destinations.impl.clickhouse.sql_client import ClickhouseSqlClient +from dlt.destinations.impl.clickhouse.sql_client import ClickHouseSqlClient from dlt.destinations.impl.clickhouse.utils import ( convert_storage_to_http_scheme, FILE_FORMAT_TO_TABLE_FUNCTION_MAPPING, @@ -59,8 +59,8 @@ HINT_TO_CLICKHOUSE_ATTR: Dict[TColumnHint, str] = { "primary_key": "PRIMARY KEY", - "unique": "", # No unique constraints available in Clickhouse. - "foreign_key": "", # No foreign key constraints support in Clickhouse. + "unique": "", # No unique constraints available in ClickHouse. + "foreign_key": "", # No foreign key constraints support in ClickHouse. } TABLE_ENGINE_TYPE_TO_CLICKHOUSE_ATTR: Dict[TTableEngineType, str] = { @@ -69,7 +69,7 @@ } -class ClickhouseTypeMapper(TypeMapper): +class ClickHouseTypeMapper(TypeMapper): sct_to_unbound_dbt = { "complex": "JSON", "text": "String", @@ -137,12 +137,12 @@ def from_db_type( return super().from_db_type(db_type, precision, scale) -class ClickhouseLoadJob(LoadJob, FollowupJob): +class ClickHouseLoadJob(LoadJob, FollowupJob): def __init__( self, file_path: str, table_name: str, - client: ClickhouseSqlClient, + client: ClickHouseSqlClient, staging_credentials: Optional[CredentialsConfiguration] = None, ) -> None: file_name = FileStorage.get_file_name_from_file_path(file_path) @@ -164,13 +164,13 @@ def __init__( if file_extension not in ["parquet", "jsonl"]: raise LoadJobTerminalException( - file_path, "Clickhouse loader Only supports parquet and jsonl files." + file_path, "ClickHouse loader Only supports parquet and jsonl files." ) # if not config.get("data_writer.disable_compression"): # raise LoadJobTerminalException( # file_path, - # "Clickhouse loader does not support gzip compressed files. Please disable" + # "ClickHouse loader does not support gzip compressed files. Please disable" # " compression in the data writer configuration:" # " https://dlthub.com/docs/reference/performance#disabling-and-enabling-file-compression.", # ) @@ -219,7 +219,7 @@ def __init__( if not isinstance(staging_credentials, AzureCredentialsWithoutDefaults): raise LoadJobTerminalException( file_path, - "Unsigned Azure Blob Storage access from Clickhouse isn't supported as yet.", + "Unsigned Azure Blob Storage access from ClickHouse isn't supported as yet.", ) # Authenticated access. @@ -260,12 +260,12 @@ def __init__( except clickhouse_connect.driver.exceptions.Error as e: raise LoadJobTerminalException( file_path, - f"Clickhouse connection failed due to {e}.", + f"ClickHouse connection failed due to {e}.", ) from e else: raise LoadJobTerminalException( file_path, - f"Clickhouse loader does not support '{bucket_scheme}' filesystem.", + f"ClickHouse loader does not support '{bucket_scheme}' filesystem.", ) # Don't use dbapi driver for local files. 
@@ -280,30 +280,30 @@ def exception(self) -> str: raise NotImplementedError() -class ClickhouseMergeJob(SqlMergeJob): +class ClickHouseMergeJob(SqlMergeJob): @classmethod def _to_temp_table(cls, select_sql: str, temp_table_name: str) -> str: return f"CREATE TEMPORARY TABLE {temp_table_name} AS {select_sql};" -class ClickhouseClient(SqlJobClientWithStaging, SupportsStagingDestination): +class ClickHouseClient(SqlJobClientWithStaging, SupportsStagingDestination): capabilities: ClassVar[DestinationCapabilitiesContext] = capabilities() def __init__( self, schema: Schema, - config: ClickhouseClientConfiguration, + config: ClickHouseClientConfiguration, ) -> None: - self.sql_client: ClickhouseSqlClient = ClickhouseSqlClient( + self.sql_client: ClickHouseSqlClient = ClickHouseSqlClient( config.normalize_dataset_name(schema), config.credentials ) super().__init__(schema, config, self.sql_client) - self.config: ClickhouseClientConfiguration = config + self.config: ClickHouseClientConfiguration = config self.active_hints = deepcopy(HINT_TO_CLICKHOUSE_ATTR) - self.type_mapper = ClickhouseTypeMapper(self.capabilities) + self.type_mapper = ClickHouseTypeMapper(self.capabilities) def _create_merge_followup_jobs(self, table_chain: Sequence[TTableSchema]) -> List[NewLoadJob]: - return [ClickhouseMergeJob.from_table_chain(table_chain, self.sql_client)] + return [ClickHouseMergeJob.from_table_chain(table_chain, self.sql_client)] def _get_column_def_sql(self, c: TColumnSchema, table_format: TTableFormat = None) -> str: # Build column definition. @@ -317,7 +317,7 @@ def _get_column_def_sql(self, c: TColumnSchema, table_format: TTableFormat = Non ) # Alter table statements only accept `Nullable` modifiers. - # JSON type isn't nullable in Clickhouse. + # JSON type isn't nullable in ClickHouse. type_with_nullability_modifier = ( f"Nullable({self.type_mapper.to_db_type(c)})" if c.get("nullable", True) and c.get("data_type") != "complex" @@ -330,7 +330,7 @@ def _get_column_def_sql(self, c: TColumnSchema, table_format: TTableFormat = Non ) def start_file_load(self, table: TTableSchema, file_path: str, load_id: str) -> LoadJob: - return super().start_file_load(table, file_path, load_id) or ClickhouseLoadJob( + return super().start_file_load(table, file_path, load_id) or ClickHouseLoadJob( file_path, table["name"], self.sql_client, @@ -395,7 +395,7 @@ def get_storage_table(self, table_name: str) -> Tuple[bool, TTableSchemaColumns] schema_table[c[0]] = schema_c # type: ignore return True, schema_table - # Clickhouse fields are not nullable by default. + # ClickHouse fields are not nullable by default. @staticmethod def _gen_not_null(v: bool) -> str: diff --git a/dlt/destinations/impl/clickhouse/clickhouse_adapter.py b/dlt/destinations/impl/clickhouse/clickhouse_adapter.py index d219c288ae..1bbde8e45d 100644 --- a/dlt/destinations/impl/clickhouse/clickhouse_adapter.py +++ b/dlt/destinations/impl/clickhouse/clickhouse_adapter.py @@ -24,7 +24,7 @@ def clickhouse_adapter(data: Any, table_engine_type: TTableEngineType = None) -> DltResource: - """Prepares data for the Clickhouse destination by specifying which table engine type + """Prepares data for the ClickHouse destination by specifying which table engine type that should be used. 
Args: diff --git a/dlt/destinations/impl/clickhouse/configuration.py b/dlt/destinations/impl/clickhouse/configuration.py index c600fcb055..88b01d1cc8 100644 --- a/dlt/destinations/impl/clickhouse/configuration.py +++ b/dlt/destinations/impl/clickhouse/configuration.py @@ -14,7 +14,7 @@ @configspec(init=False) -class ClickhouseCredentials(ConnectionStringCredentials): +class ClickHouseCredentials(ConnectionStringCredentials): drivername: str = "clickhouse" host: str # type: ignore """Host with running ClickHouse server.""" @@ -69,9 +69,9 @@ def to_url(self) -> URL: @configspec -class ClickhouseClientConfiguration(DestinationClientDwhWithStagingConfiguration): +class ClickHouseClientConfiguration(DestinationClientDwhWithStagingConfiguration): destination_type: Final[str] = "clickhouse" # type: ignore[misc] - credentials: ClickhouseCredentials # type: ignore + credentials: ClickHouseCredentials # type: ignore dataset_name: Final[str] = "" # type: ignore """dataset name in the destination to load data to, for schemas that are not default schema, it is used as dataset prefix""" @@ -91,7 +91,7 @@ def fingerprint(self) -> str: def __init__( self, *, - credentials: ClickhouseCredentials = None, + credentials: ClickHouseCredentials = None, dataset_name: str = None, destination_name: str = None, environment: str = None diff --git a/dlt/destinations/impl/clickhouse/factory.py b/dlt/destinations/impl/clickhouse/factory.py index a95d4d450d..6f2de84db7 100644 --- a/dlt/destinations/impl/clickhouse/factory.py +++ b/dlt/destinations/impl/clickhouse/factory.py @@ -5,43 +5,43 @@ from dlt.common.destination import Destination, DestinationCapabilitiesContext from dlt.destinations.impl.clickhouse import capabilities from dlt.destinations.impl.clickhouse.configuration import ( - ClickhouseClientConfiguration, - ClickhouseCredentials, + ClickHouseClientConfiguration, + ClickHouseCredentials, ) if t.TYPE_CHECKING: - from dlt.destinations.impl.clickhouse.clickhouse import ClickhouseClient + from dlt.destinations.impl.clickhouse.clickhouse import ClickHouseClient # noinspection PyPep8Naming -class clickhouse(Destination[ClickhouseClientConfiguration, "ClickhouseClient"]): - spec = ClickhouseClientConfiguration +class clickhouse(Destination[ClickHouseClientConfiguration, "ClickHouseClient"]): + spec = ClickHouseClientConfiguration def capabilities(self) -> DestinationCapabilitiesContext: return capabilities() @property - def client_class(self) -> t.Type["ClickhouseClient"]: - from dlt.destinations.impl.clickhouse.clickhouse import ClickhouseClient + def client_class(self) -> t.Type["ClickHouseClient"]: + from dlt.destinations.impl.clickhouse.clickhouse import ClickHouseClient - return ClickhouseClient + return ClickHouseClient def __init__( self, - credentials: t.Union[ClickhouseCredentials, str, t.Dict[str, t.Any], Connection] = None, + credentials: t.Union[ClickHouseCredentials, str, t.Dict[str, t.Any], Connection] = None, destination_name: str = None, environment: str = None, **kwargs: t.Any, ) -> None: - """Configure the Clickhouse destination to use in a pipeline. + """Configure the ClickHouse destination to use in a pipeline. All arguments provided here supersede other configuration sources such as environment variables and dlt config files. Args: credentials: Credentials to connect to the clickhouse database. 
- Can be an instance of `ClickhouseCredentials`, or a connection string + Can be an instance of `ClickHouseCredentials`, or a connection string in the format `clickhouse://user:password@host:port/database`. **kwargs: Additional arguments passed to the destination config. """ diff --git a/dlt/destinations/impl/clickhouse/sql_client.py b/dlt/destinations/impl/clickhouse/sql_client.py index 0857f15f52..184205da78 100644 --- a/dlt/destinations/impl/clickhouse/sql_client.py +++ b/dlt/destinations/impl/clickhouse/sql_client.py @@ -21,7 +21,7 @@ DatabaseTerminalException, ) from dlt.destinations.impl.clickhouse import capabilities -from dlt.destinations.impl.clickhouse.configuration import ClickhouseCredentials +from dlt.destinations.impl.clickhouse.configuration import ClickHouseCredentials from dlt.destinations.sql_client import ( DBApiCursorImpl, SqlClientBase, @@ -33,21 +33,21 @@ TRANSACTIONS_UNSUPPORTED_WARNING_MESSAGE = ( - "Clickhouse does not support transactions! Each statement is auto-committed separately." + "ClickHouse does not support transactions! Each statement is auto-committed separately." ) -class ClickhouseDBApiCursorImpl(DBApiCursorImpl): +class ClickHouseDBApiCursorImpl(DBApiCursorImpl): native_cursor: DictCursor -class ClickhouseSqlClient( +class ClickHouseSqlClient( SqlClientBase[clickhouse_driver.dbapi.connection.Connection], DBTransaction ): dbapi: ClassVar[DBApi] = clickhouse_driver.dbapi.connection.Connection capabilities: ClassVar[DestinationCapabilitiesContext] = capabilities() - def __init__(self, dataset_name: str, credentials: ClickhouseCredentials) -> None: + def __init__(self, dataset_name: str, credentials: ClickHouseCredentials) -> None: super().__init__(credentials.database, dataset_name) self._conn: clickhouse_driver.dbapi.connection = None self.credentials = credentials @@ -89,11 +89,11 @@ def execute_sql( return None if curr.description is None else curr.fetchall() def create_dataset(self) -> None: - # Clickhouse doesn't have schemas. + # ClickHouse doesn't have schemas. pass def drop_dataset(self) -> None: - # Since Clickhouse doesn't have schemas, we need to drop all tables in our virtual schema, + # Since ClickHouse doesn't have schemas, we need to drop all tables in our virtual schema, # or collection of tables, that has the `dataset_name` as a prefix. 
to_drop_results = self.execute_sql( """ @@ -120,7 +120,7 @@ def drop_dataset(self) -> None: @raise_database_error def execute_query( self, query: AnyStr, *args: Any, **kwargs: Any - ) -> Iterator[ClickhouseDBApiCursorImpl]: + ) -> Iterator[ClickHouseDBApiCursorImpl]: assert isinstance(query, str), "Query must be a string" db_args = kwargs.copy() @@ -144,7 +144,7 @@ def execute_query( except KeyError as e: raise DatabaseTransientException(OperationalError()) from e - yield ClickhouseDBApiCursorImpl(cursor) # type: ignore[abstract] + yield ClickHouseDBApiCursorImpl(cursor) # type: ignore[abstract] def fully_qualified_dataset_name(self, escape: bool = True) -> str: if escape: diff --git a/tests/load/clickhouse/test_clickhouse_configuration.py b/tests/load/clickhouse/test_clickhouse_configuration.py index f2248328c5..61862170ed 100644 --- a/tests/load/clickhouse/test_clickhouse_configuration.py +++ b/tests/load/clickhouse/test_clickhouse_configuration.py @@ -7,8 +7,8 @@ from dlt.common.libs.sql_alchemy import make_url from dlt.common.utils import digest128 from dlt.destinations.impl.clickhouse.configuration import ( - ClickhouseCredentials, - ClickhouseClientConfiguration, + ClickHouseCredentials, + ClickHouseClientConfiguration, ) from dlt.destinations.impl.snowflake.configuration import ( SnowflakeClientConfiguration, @@ -20,7 +20,7 @@ def test_clickhouse_connection_string_with_all_params() -> None: url = "clickhouse://user1:pass1@host1:9000/testdb?secure=0&connect_timeout=230&send_receive_timeout=1000" - creds = ClickhouseCredentials() + creds = ClickHouseCredentials() creds.parse_native_representation(url) assert creds.database == "testdb" @@ -40,7 +40,7 @@ def test_clickhouse_connection_string_with_all_params() -> None: def test_clickhouse_configuration() -> None: # def empty fingerprint - assert ClickhouseClientConfiguration().fingerprint() == "" + assert ClickHouseClientConfiguration().fingerprint() == "" # based on host c = resolve_configuration( SnowflakeCredentials(), diff --git a/tests/load/clickhouse/test_clickhouse_table_builder.py b/tests/load/clickhouse/test_clickhouse_table_builder.py index 12a814de48..9d3fadfc47 100644 --- a/tests/load/clickhouse/test_clickhouse_table_builder.py +++ b/tests/load/clickhouse/test_clickhouse_table_builder.py @@ -6,21 +6,21 @@ from dlt.common.schema import Schema from dlt.common.utils import custom_environ, digest128 from dlt.common.utils import uniq_id -from dlt.destinations.impl.clickhouse.clickhouse import ClickhouseClient +from dlt.destinations.impl.clickhouse.clickhouse import ClickHouseClient from dlt.destinations.impl.clickhouse.configuration import ( - ClickhouseCredentials, - ClickhouseClientConfiguration, + ClickHouseCredentials, + ClickHouseClientConfiguration, ) from tests.load.utils import TABLE_UPDATE, empty_schema @pytest.fixture -def clickhouse_client(empty_schema: Schema) -> ClickhouseClient: +def clickhouse_client(empty_schema: Schema) -> ClickHouseClient: # Return a client without opening connection. 
- creds = ClickhouseCredentials() - return ClickhouseClient( + creds = ClickHouseCredentials() + return ClickHouseClient( empty_schema, - ClickhouseClientConfiguration(dataset_name=f"test_{uniq_id()}", credentials=creds), + ClickHouseClientConfiguration(dataset_name=f"test_{uniq_id()}", credentials=creds), ) @@ -34,21 +34,21 @@ def test_clickhouse_configuration() -> None: "DESTINATION__CLICKHOUSE__CREDENTIALS__PASSWORD": "fuss_do_rah", } ): - C = resolve_configuration(ClickhouseCredentials(), sections=("destination", "clickhouse")) + C = resolve_configuration(ClickHouseCredentials(), sections=("destination", "clickhouse")) assert C.database == "mydb" assert C.password == "fuss_do_rah" # Check fingerprint. - assert ClickhouseClientConfiguration().fingerprint() == "" + assert ClickHouseClientConfiguration().fingerprint() == "" # Based on host. c = resolve_configuration( - ClickhouseCredentials(), + ClickHouseCredentials(), explicit_value="clickhouse://user1:pass@host1/db1", ) - assert ClickhouseClientConfiguration(credentials=c).fingerprint() == digest128("host1") + assert ClickHouseClientConfiguration(credentials=c).fingerprint() == digest128("host1") -def test_clickhouse_create_table(clickhouse_client: ClickhouseClient) -> None: +def test_clickhouse_create_table(clickhouse_client: ClickHouseClient) -> None: statements = clickhouse_client._get_table_update_sql("event_test_table", TABLE_UPDATE, False) assert len(statements) == 1 sql = statements[0] @@ -88,7 +88,7 @@ def test_clickhouse_create_table(clickhouse_client: ClickhouseClient) -> None: assert "`col11_precision` DateTime" in sql -def test_clickhouse_alter_table(clickhouse_client: ClickhouseClient) -> None: +def test_clickhouse_alter_table(clickhouse_client: ClickHouseClient) -> None: statements = clickhouse_client._get_table_update_sql("event_test_table", TABLE_UPDATE, True) assert len(statements) == 1 sql = statements[0] @@ -138,7 +138,7 @@ def test_clickhouse_alter_table(clickhouse_client: ClickhouseClient) -> None: @pytest.mark.usefixtures("empty_schema") -def test_clickhouse_create_table_with_primary_keys(clickhouse_client: ClickhouseClient) -> None: +def test_clickhouse_create_table_with_primary_keys(clickhouse_client: ClickHouseClient) -> None: mod_update = deepcopy(TABLE_UPDATE) mod_update[1]["primary_key"] = True @@ -154,7 +154,7 @@ def test_clickhouse_create_table_with_primary_keys(clickhouse_client: Clickhouse "Only `primary_key` hint has been implemented so far, which isn't specified inline with the" " column definition." ) -def test_clickhouse_create_table_with_hints(client: ClickhouseClient) -> None: +def test_clickhouse_create_table_with_hints(client: ClickHouseClient) -> None: mod_update = deepcopy(TABLE_UPDATE) mod_update[0]["primary_key"] = True diff --git a/tests/load/test_job_client.py b/tests/load/test_job_client.py index 1fadda3df3..8a9f239a98 100644 --- a/tests/load/test_job_client.py +++ b/tests/load/test_job_client.py @@ -393,8 +393,8 @@ def test_get_storage_table_with_all_types(client: SqlJobClientBase) -> None: continue if client.config.destination_type == "databricks" and c["data_type"] in ("complex", "time"): continue - # Clickhouse has no active data type for binary or time type. - # TODO: JSON type is available, but not nullable in Clickhouse. + # ClickHouse has no active data type for binary or time type. + # TODO: JSON type is available, but not nullable in ClickHouse. 
if client.config.destination_type == "clickhouse": if c["data_type"] in ("binary", "time"): continue From a6461185605a04445603abd34f6c60826d098b71 Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Mon, 8 Apr 2024 22:02:45 +0200 Subject: [PATCH 070/127] Support compression codec for azure and local #1055 Signed-off-by: Marcel Coetzee --- dlt/destinations/__init__.py | 2 ++ .../impl/clickhouse/clickhouse.py | 18 ++++-------- .../impl/clickhouse/sql_client.py | 1 + tests/load/utils.py | 28 +++++++++++++------ tests/utils.py | 1 + 5 files changed, 29 insertions(+), 21 deletions(-) diff --git a/dlt/destinations/__init__.py b/dlt/destinations/__init__.py index 13b7f7ed99..302de24a6b 100644 --- a/dlt/destinations/__init__.py +++ b/dlt/destinations/__init__.py @@ -14,6 +14,7 @@ from dlt.destinations.impl.synapse.factory import synapse from dlt.destinations.impl.databricks.factory import databricks from dlt.destinations.impl.dremio.factory import dremio +from dlt.destinations.impl.clickhouse.factory import clickhouse __all__ = [ @@ -32,5 +33,6 @@ "synapse", "databricks", "dremio", + "clickhouse", "destination", ] diff --git a/dlt/destinations/impl/clickhouse/clickhouse.py b/dlt/destinations/impl/clickhouse/clickhouse.py index 20304bf284..c1bb9223c0 100644 --- a/dlt/destinations/impl/clickhouse/clickhouse.py +++ b/dlt/destinations/impl/clickhouse/clickhouse.py @@ -9,6 +9,7 @@ from jinja2 import Template import dlt +from dlt import config from dlt.common.configuration.specs import ( CredentialsConfiguration, AzureCredentialsWithoutDefaults, @@ -167,20 +168,12 @@ def __init__( file_path, "ClickHouse loader Only supports parquet and jsonl files." ) - # if not config.get("data_writer.disable_compression"): - # raise LoadJobTerminalException( - # file_path, - # "ClickHouse loader does not support gzip compressed files. Please disable" - # " compression in the data writer configuration:" - # " https://dlthub.com/docs/reference/performance#disabling-and-enabling-file-compression.", - # ) - bucket_url = urlparse(bucket_path) bucket_scheme = bucket_url.scheme file_extension = cast(SUPPORTED_FILE_FORMATS, file_extension) clickhouse_format: str = FILE_FORMAT_TO_TABLE_FUNCTION_MAPPING[file_extension] - # compression = "none" if config.get("data_writer.disable_compression") else "gz" + compression = "none" if config.get("data_writer.disable_compression") else "gz" statement: str = "" @@ -191,7 +184,6 @@ def __init__( access_key_id = staging_credentials.aws_access_key_id secret_access_key = staging_credentials.aws_secret_access_key elif isinstance(staging_credentials, GcpCredentials): - # TODO: HMAC keys aren't implemented in `GcpCredentials`. 
access_key_id = dlt.config["destination.filesystem.credentials.gcp_access_key_id"] secret_access_key = dlt.config[ "destination.filesystem.credentials.gcp_secret_access_key" @@ -233,7 +225,7 @@ def __init__( table_function = ( "SELECT * FROM" - f" azureBlobStorage('{storage_account_url}','{container_name}','{blobpath}','{account_name}','{account_key}','{clickhouse_format}')" + f" azureBlobStorage('{storage_account_url}','{container_name}','{blobpath}','{account_name}','{account_key}','{clickhouse_format}','{compression}')" ) statement = f"INSERT INTO {qualified_table_name} {table_function}" elif not bucket_path: @@ -255,7 +247,9 @@ def __init__( settings={ "allow_experimental_lightweight_delete": 1, "allow_experimental_object_type": 1, + "enable_http_compression": 1, }, + compression=None if compression == "none" else compression, ) except clickhouse_connect.driver.exceptions.Error as e: raise LoadJobTerminalException( @@ -363,8 +357,6 @@ def _get_table_update_sql( else: sql[0] += "\nPRIMARY KEY tuple()" - # TODO: Apply sort order and cluster key hints. - return sql def get_storage_table(self, table_name: str) -> Tuple[bool, TTableSchemaColumns]: diff --git a/dlt/destinations/impl/clickhouse/sql_client.py b/dlt/destinations/impl/clickhouse/sql_client.py index 184205da78..0af8933ae7 100644 --- a/dlt/destinations/impl/clickhouse/sql_client.py +++ b/dlt/destinations/impl/clickhouse/sql_client.py @@ -134,6 +134,7 @@ def execute_query( query = ( "set allow_experimental_lightweight_delete = 1;" "set allow_experimental_object_type = 1;" + "set enable_http_compression= 1;" f"{query}" ) with self._conn.cursor() as cursor: diff --git a/tests/load/utils.py b/tests/load/utils.py index 947b6dbe3d..9ebdb0f8fa 100644 --- a/tests/load/utils.py +++ b/tests/load/utils.py @@ -175,8 +175,8 @@ def destinations_configs( destination_configs += [ DestinationTestConfiguration(destination=destination) for destination in SQL_DESTINATIONS - if destination not in ("athena", "mssql", "synapse", "databricks", "clickhouse") - if destination not in ("athena", "mssql", "synapse", "databricks", "dremio") + if destination + not in ("athena", "mssql", "synapse", "databricks", "dremio", "clickhouse") ] destination_configs += [ DestinationTestConfiguration(destination="duckdb", file_format="parquet") @@ -205,7 +205,17 @@ def destinations_configs( DestinationTestConfiguration( destination="clickhouse", file_format="jsonl", - disable_compression=True, + ) + ] + destination_configs += [ + DestinationTestConfiguration( + destination="clickhouse", + file_format="parquet", + ) + ] + destination_configs += [ + DestinationTestConfiguration( + destination="clickhouse", file_format="parquet", disable_compression=True ) ] destination_configs += [ @@ -326,7 +336,6 @@ def destinations_configs( file_format="parquet", bucket_url=GCS_BUCKET, extra_info="gcs-authorization", - disable_compression=True, ), DestinationTestConfiguration( destination="clickhouse", @@ -334,7 +343,6 @@ def destinations_configs( file_format="parquet", bucket_url=AWS_BUCKET, extra_info="s3-authorization", - disable_compression=True, ), DestinationTestConfiguration( destination="clickhouse", @@ -342,7 +350,6 @@ def destinations_configs( file_format="parquet", bucket_url=AZ_BUCKET, extra_info="az-authorization", - disable_compression=True, ), DestinationTestConfiguration( destination="clickhouse", @@ -350,7 +357,6 @@ def destinations_configs( file_format="jsonl", bucket_url=AZ_BUCKET, extra_info="az-authorization", - disable_compression=True, ), 
DestinationTestConfiguration( destination="clickhouse", @@ -358,7 +364,6 @@ def destinations_configs( file_format="jsonl", bucket_url=GCS_BUCKET, extra_info="gcs-authorization", - disable_compression=True, ), DestinationTestConfiguration( destination="clickhouse", @@ -366,7 +371,14 @@ def destinations_configs( file_format="jsonl", bucket_url=AWS_BUCKET, extra_info="s3-authorization", + ), + DestinationTestConfiguration( + destination="clickhouse", + staging="filesystem", + file_format="jsonl", + bucket_url=AWS_BUCKET, disable_compression=True, + extra_info="s3-authorization", ), DestinationTestConfiguration( destination="dremio", diff --git a/tests/utils.py b/tests/utils.py index 410c2363d3..710e95458d 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -48,6 +48,7 @@ "destination", "synapse", "databricks", + "clickhouse", "dremio", } NON_SQL_DESTINATIONS = {"filesystem", "weaviate", "dummy", "motherduck", "qdrant", "destination"} From 6fee65edcf2789b955a277f759474e862f2d1b2f Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Mon, 8 Apr 2024 23:37:14 +0200 Subject: [PATCH 071/127] Merge Signed-off-by: Marcel Coetzee --- .../impl/clickhouse/clickhouse.py | 11 +- poetry.lock | 416 +++++++++++++++++- tests/load/utils.py | 35 +- 3 files changed, 444 insertions(+), 18 deletions(-) diff --git a/dlt/destinations/impl/clickhouse/clickhouse.py b/dlt/destinations/impl/clickhouse/clickhouse.py index c1bb9223c0..b534b5f504 100644 --- a/dlt/destinations/impl/clickhouse/clickhouse.py +++ b/dlt/destinations/impl/clickhouse/clickhouse.py @@ -22,7 +22,7 @@ TLoadJobState, FollowupJob, LoadJob, - NewLoadJob, + NewLoadJob, StorageSchemaInfo, ) from dlt.common.schema import Schema, TColumnSchema from dlt.common.schema.typing import ( @@ -247,9 +247,8 @@ def __init__( settings={ "allow_experimental_lightweight_delete": 1, "allow_experimental_object_type": 1, - "enable_http_compression": 1, }, - compression=None if compression == "none" else compression, + # compression=None if compression == "none" else compression, ) except clickhouse_connect.driver.exceptions.Error as e: raise LoadJobTerminalException( @@ -387,10 +386,14 @@ def get_storage_table(self, table_name: str) -> Tuple[bool, TTableSchemaColumns] schema_table[c[0]] = schema_c # type: ignore return True, schema_table - # ClickHouse fields are not nullable by default. + + def get_stored_schema(self) -> StorageSchemaInfo: + return super().get_stored_schema() + @staticmethod def _gen_not_null(v: bool) -> str: + # ClickHouse fields are not nullable by default. # We use the `Nullable` modifier instead of NULL / NOT NULL modifiers to cater for ALTER statement. pass diff --git a/poetry.lock b/poetry.lock index a05cb28c1b..7e3497fb90 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. 
[[package]] name = "about-time" @@ -1670,6 +1670,217 @@ files = [ click = ">=4.0" PyYAML = ">=3.11" +[[package]] +name = "clickhouse-connect" +version = "0.7.7" +description = "ClickHouse Database Core Driver for Python, Pandas, and Superset" +optional = true +python-versions = "~=3.8" +files = [ + {file = "clickhouse-connect-0.7.7.tar.gz", hash = "sha256:c9ff4377ceefaa47c382a7372da4fdbf807f931000ad9b5bab850a60166b0d1c"}, + {file = "clickhouse_connect-0.7.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:233037de24cf1d9ed0bed0a507b970845e7f181da708198a0a033b05ae408969"}, + {file = "clickhouse_connect-0.7.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6ea4deac8d35323d328155497ba51d80b54a20c2dd2d93b5fd6a1d8f92dca630"}, + {file = "clickhouse_connect-0.7.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e24036258ed72e3440be3b7302bbd110157bcdddc1a92e0f6829bba5eb10b7a"}, + {file = "clickhouse_connect-0.7.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80826f10017c07f00cac111fcdf0ac8ec4075d6ae718047cac6440ce3df9816e"}, + {file = "clickhouse_connect-0.7.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5d72fec2df3996edfd0a7b2d9c51a99db8771f122537bd1ef963d3d7ccb315c"}, + {file = "clickhouse_connect-0.7.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:542c9fa4c92de8bcd77b8503b7ef778f32f307823eba37a3fb955e0b82fc8e7e"}, + {file = "clickhouse_connect-0.7.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:57143505ab5c17391bdaaa6962d2800beebc7d0ab0dc23de5a097978394e75ea"}, + {file = "clickhouse_connect-0.7.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:716f13d77b9f82bbe52e428519dc00309ca185add10fa56662a5f07dbccda595"}, + {file = "clickhouse_connect-0.7.7-cp310-cp310-win32.whl", hash = "sha256:eae28d7b14df34fd853b3b8c6811b002d38e7d04015c034fefc2332dda9807af"}, + {file = "clickhouse_connect-0.7.7-cp310-cp310-win_amd64.whl", hash = "sha256:fe418891f755d39d82036c5b311ddb37f54bf331141a463b69089334a3b676c8"}, + {file = "clickhouse_connect-0.7.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5be44154f86d642fddeffe4466ad7fc53370468102a83ba3805a337693347210"}, + {file = "clickhouse_connect-0.7.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f4a898d7dcb0e2c13a73d823569e3d5e8ef4f1a1b25ead0dc6be04ea277a2488"}, + {file = "clickhouse_connect-0.7.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9d25256130e2601c0c4e815e7a05b3732713c7389170d18df36e2c37ed20e11"}, + {file = "clickhouse_connect-0.7.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f123122c34d2fb8a68911c70872be7db749d406e18fcd165e7cdfea45f372c20"}, + {file = "clickhouse_connect-0.7.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49392e83e4691aec1c3050cb0e7534cab196e0da23065adcfbe7f0d77523c586"}, + {file = "clickhouse_connect-0.7.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7fe3731800957c374b7d8b3c1f959f766f7946d478e0f3f208815935b9231dec"}, + {file = "clickhouse_connect-0.7.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:2bc66286e5374e01f1df92d21293bdf40d5cabf664dabd6ea8f99ba495354c12"}, + {file = "clickhouse_connect-0.7.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a78794703bea0df09a5fe5965a69cc7f8044f72e8470efc123257dea77a06edf"}, + {file = "clickhouse_connect-0.7.7-cp311-cp311-win32.whl", hash = 
"sha256:5a5764a2ec7e1085136789e29ecc69dd19b799c071ec5eff63f7f13a82fcb1bf"}, + {file = "clickhouse_connect-0.7.7-cp311-cp311-win_amd64.whl", hash = "sha256:c43c693b8a360d948b0b8914b37b233d61e63286d921a753c7f8cfb96cff607f"}, + {file = "clickhouse_connect-0.7.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:355a14e500d7f99a2ab152886ae253104edf65ce32605d7670691d399527564f"}, + {file = "clickhouse_connect-0.7.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:865a9cb3aa823a8e5d4b73892b4cba810b514162fafe52f7c6a76abf79483dbd"}, + {file = "clickhouse_connect-0.7.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c175735b6c9ec884fe8d196cbda98bde53c7d376e8d7df1deac407d678f2250"}, + {file = "clickhouse_connect-0.7.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a7e4ab3e949bcae660cc4d825416f45a840db70ea529f4055f47add7cc6a380c"}, + {file = "clickhouse_connect-0.7.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fb84c3cf9009c0192cc025908a215a83e322d8964116c40239407470e591025"}, + {file = "clickhouse_connect-0.7.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:41caca3ee5db49311a55b885a60e51f94ff29e8f56083a04db3383c5604013e0"}, + {file = "clickhouse_connect-0.7.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:f99f89badd6ca8f731bc7bc9d5c30b43bcec9286446b57d640206573d15f8a37"}, + {file = "clickhouse_connect-0.7.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a6b1a072763e83d2670ac45f0e3ebd58f9aa2c430d12daf2cd7cdc7fc8049e35"}, + {file = "clickhouse_connect-0.7.7-cp312-cp312-win32.whl", hash = "sha256:95fa9482fab216806632e80c0f9ad3d28433fb73295afbb2fc56b8d437de70c5"}, + {file = "clickhouse_connect-0.7.7-cp312-cp312-win_amd64.whl", hash = "sha256:59b95cfb84795bf0c23a6d9eac43fbba6de0fb57ac8d12efdcf82e6408a2ff4c"}, + {file = "clickhouse_connect-0.7.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8616917cdacdd388c2678faf8989594a8db2fac35b324931aff18193b5e8e97e"}, + {file = "clickhouse_connect-0.7.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1bbc60823cfced80b2abc0042d515ad18f02900577b230c97203bac805120eb6"}, + {file = "clickhouse_connect-0.7.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70ac16feb0a0510b5ee0cfcc898bdaa7f149e7dc250126bf302828fdec24189e"}, + {file = "clickhouse_connect-0.7.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf214483628740bac0801abba8d124408bfc21aeec26a97a81996975b6795f4a"}, + {file = "clickhouse_connect-0.7.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:77f4160de7db18d8f3ba9ebce12c629be825e630a85380618bc2f4fe2fdcf565"}, + {file = "clickhouse_connect-0.7.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5f1eac642de2aeef60644944a9fae576fda4d6216a0b5880e50cc68459090bb0"}, + {file = "clickhouse_connect-0.7.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:12d31e3bd21fdc3b7c797a22ec94a41d584cd78ea925c6145ae83c74930a2675"}, + {file = "clickhouse_connect-0.7.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:79e5611a67d35cd6f444e9b715cb0bfbf216129462e1fe244dd27474c3e12ba5"}, + {file = "clickhouse_connect-0.7.7-cp38-cp38-win32.whl", hash = "sha256:00a53122c895ff9a364542a03c851c651331c986d601a6a3f0a8d64a63d3f33a"}, + {file = "clickhouse_connect-0.7.7-cp38-cp38-win_amd64.whl", hash = "sha256:4e826059be77957c695c0925f4f94f4111cc18e9cfa80798469e13d786d43034"}, + {file = 
"clickhouse_connect-0.7.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2d0fc9fca68451eddf84c0261be951b45d29b3cb0fe2775abd97f2963e52a5cc"}, + {file = "clickhouse_connect-0.7.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d9fbc8c3460c9016357aa142bf1cef62e67c4e8e92344ecef50368507f551c4e"}, + {file = "clickhouse_connect-0.7.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1872550d0859608dfeef90fef80c0f21d19f390dd301562e9dfb4c58010a825c"}, + {file = "clickhouse_connect-0.7.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cceb3db2d3ce4bb94c21fc648af42aa3f29ea9c001f50cff862562d7a314f044"}, + {file = "clickhouse_connect-0.7.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:06ab8705a60c2ba75be4e160eba97a073da5f3da7b2a8fe75e2a3c0f6d943ee8"}, + {file = "clickhouse_connect-0.7.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8d1f99db0d091dcd9d488a3cde76973048d678e2bd7408d46b11a9dc0cac8963"}, + {file = "clickhouse_connect-0.7.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7d696c31145a9373d71388f035b79cd5182b6931aa4663b4f22c9c339607022b"}, + {file = "clickhouse_connect-0.7.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0fac18dc51bbd9ed7c54271b6b9f45bdf4aeded51903da05d6250a6e851437c3"}, + {file = "clickhouse_connect-0.7.7-cp39-cp39-win32.whl", hash = "sha256:583c0928431cd0da4ad234a70935631035b13f99a746cb0e6b75876c890adb9c"}, + {file = "clickhouse_connect-0.7.7-cp39-cp39-win_amd64.whl", hash = "sha256:9440118da473b22bfb4c1e28de8f3ee03fb8cc5b30d3bcd47e60a2e07007f907"}, + {file = "clickhouse_connect-0.7.7-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6d5afcb747d562db33a8d89f82eb0b14a98d020553793650b8c7b0098cdd4228"}, + {file = "clickhouse_connect-0.7.7-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42655edd00c72f29ad493ae2c40b149285d174eb8c7f53570566f575b8bc1834"}, + {file = "clickhouse_connect-0.7.7-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63c9a3827daa446d72f2085bebfd06f9b2922a17bc632bcb6874d9e015667f6d"}, + {file = "clickhouse_connect-0.7.7-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fdc483d70d6c465667e509c67ebe90df1547f3c4dc40141215a23231b0f508b1"}, + {file = "clickhouse_connect-0.7.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ca9e774178758503d45c07f0b7a3c6e9873d40265057a1dcfb698913ddef743a"}, + {file = "clickhouse_connect-0.7.7-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4ea52cef11a3fa52aa4800b31cff1161719046775243b2f211b5dbb0e7b82cf2"}, + {file = "clickhouse_connect-0.7.7-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63fd602b8a5261713cec048a31e2983f6302be5fb3476f57ae38c6c827857b3e"}, + {file = "clickhouse_connect-0.7.7-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47c0d9fdeda44de81f5c5c87e3b978f1d9f39a22b3f4239f341d5dcff42f0e73"}, + {file = "clickhouse_connect-0.7.7-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4dee8e39d03a3092663272d601e3274b6b350332ae5cecb3909fbac411a2287"}, + {file = "clickhouse_connect-0.7.7-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:d6fdc70fc0285556704aa3087cc443cefa6b679a72c1559a70cac3d31a2bd3bf"}, + {file = "clickhouse_connect-0.7.7-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:c6faf19cf11f29986bd3ea568a86859fa3d492429268d6e2dd632d6cf48fe62b"}, + {file = "clickhouse_connect-0.7.7-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17d15ce6d21f8f8cd8ae9a76df0c2bc713e1741b42a9851d13ac12e450e63667"}, + {file = "clickhouse_connect-0.7.7-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4b3b3dc9c25b4f5b93d79a338eb3092cde61cfb5e25b76b6456ffe01637b138"}, + {file = "clickhouse_connect-0.7.7-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dcc6840d8204ea621b7cfaee79d873dbea8314a47ba39e05894f0338c05641be"}, + {file = "clickhouse_connect-0.7.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:ea04b094cf3a2cb0335b6f46d895a5019d2eab0b2eb9a0abbbf939d473c36218"}, +] + +[package.dependencies] +certifi = "*" +lz4 = "*" +pytz = "*" +urllib3 = ">=1.26" +zstandard = "*" + +[package.extras] +arrow = ["pyarrow"] +numpy = ["numpy"] +orjson = ["orjson"] +pandas = ["pandas"] +sqlalchemy = ["sqlalchemy (>1.3.21,<2.0)"] + +[[package]] +name = "clickhouse-driver" +version = "0.2.7" +description = "Python driver with native interface for ClickHouse" +optional = true +python-versions = ">=3.7, <4" +files = [ + {file = "clickhouse-driver-0.2.7.tar.gz", hash = "sha256:299cfbe6d561955d88eeab6e09f3de31e2f6daccc6fdd904a59e46357d2d28d9"}, + {file = "clickhouse_driver-0.2.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c44fefc2fd44f432d5b162bfe34ad76840137c34167d46a18c554a7c7c6e3566"}, + {file = "clickhouse_driver-0.2.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e018452a7bf8d8c0adf958afbc5b0d29e402fc09a1fb34e9186293eae57f3b4e"}, + {file = "clickhouse_driver-0.2.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ff8b09f8b13df28d2f91ee3d0d2edd9589cbda76b74acf60669112219cea8c9d"}, + {file = "clickhouse_driver-0.2.7-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:54aa91c9512fd5a73f038cae4f67ca2ff0b2f8a84de846179a31530936ef4e20"}, + {file = "clickhouse_driver-0.2.7-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8342a7ba31ccb393ee31dfd61173aa84c995b4ac0b44d404adc8463534233d5"}, + {file = "clickhouse_driver-0.2.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:199000f8adf38fade0b5a52c273a396168105539de741a18ba3e68d7fc06e0e6"}, + {file = "clickhouse_driver-0.2.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f60a2a40602b207506e505cfb184a81cd4b752bde17153bc0b32c3931ddb792f"}, + {file = "clickhouse_driver-0.2.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5db3a26b18146b2b0b06d3f32ce588af5afaa38c719daf6f9606981514228a8b"}, + {file = "clickhouse_driver-0.2.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5579a31da1f3cf49630e43fbbb11cab891b78161abdcb33908b79820b7cd3a23"}, + {file = "clickhouse_driver-0.2.7-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:cc39f0fb761aed96917b0f55679174a50f9591afc0e696e745cd698ef822661f"}, + {file = "clickhouse_driver-0.2.7-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:9aa0f7c740e4e61886c6d388792c5d1a2084d4b5462e6dcfc24e30ca7e7f8e68"}, + {file = "clickhouse_driver-0.2.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2caee88b6eec7b33ddbccd24501ad99ff8ff2b0a6a4471945cbfb28947a9a791"}, + {file = "clickhouse_driver-0.2.7-cp310-cp310-win32.whl", hash = "sha256:a4aef432cc7120a971eebb7ca2fddac4472e810b57e403d3a371b0c69cbb2bb0"}, + {file = 
"clickhouse_driver-0.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:f307de7df6bc23ad5ec8a1ba1db157f4d14de673ddd4798f37790f23255605b0"}, + {file = "clickhouse_driver-0.2.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cbf3ca8919bf856ca6588669a863065fb732a32a6387095f64d19038fd99db9f"}, + {file = "clickhouse_driver-0.2.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ab68b3d9b9d1386adfd3a57edd47b62858a145bf7ccc7f11b31d308195d966e5"}, + {file = "clickhouse_driver-0.2.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:985a9d60044c5ad39c6e018b852c7105ec4ebfdf4c3abe23183b4867454e570a"}, + {file = "clickhouse_driver-0.2.7-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6c94330054c8d92d2286898906f843f26e2f96fc2aa11a9a96a7b5593d299bf0"}, + {file = "clickhouse_driver-0.2.7-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:92938f55c8f797e50e624a4b96e685178d043cdf0ede306a7fd4e7dda19b8dfd"}, + {file = "clickhouse_driver-0.2.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05bd53e9bf49c3013d06f9e6d2812872d44b150f7a2d1cf18e1498257d42330e"}, + {file = "clickhouse_driver-0.2.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f1f8ed5404e283a9ded499c33eade2423fdc15e31f8a711d75e91f890d0f70b"}, + {file = "clickhouse_driver-0.2.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a398085e4a1766d907ac32c282d4172db38a44243bde303372396208d1cbf4bb"}, + {file = "clickhouse_driver-0.2.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:fa1808593123b6056f93808f0afbc7938f06a8149cb4e381aa7b1a234c1d3c18"}, + {file = "clickhouse_driver-0.2.7-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:0512d54ae23bd4a69278e04f42b651d7c71b63ba6043e2c6bd97b11329692f99"}, + {file = "clickhouse_driver-0.2.7-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5bc2b67e7e68f74ccebf95a8b3a13f13a7c34b89b32c9813103221de14c06c8b"}, + {file = "clickhouse_driver-0.2.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:04a37cdafc671cb796af3e566cef0aeb39111d82aebeecd9106a049434953b26"}, + {file = "clickhouse_driver-0.2.7-cp311-cp311-win32.whl", hash = "sha256:019538c7c23e976538e5081dd2f77a8a40bf663c638a62d857ff05f42b0c9052"}, + {file = "clickhouse_driver-0.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:5166643683584bc53fcadda73c65f6a9077feb472f3d167ecef1a1a7024973aa"}, + {file = "clickhouse_driver-0.2.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:59affab7b5a3c4aab5b6a730f606575efdefea213458de2eb14927ee4e0640f4"}, + {file = "clickhouse_driver-0.2.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:dcb93dd07fe65ac4f1a2bc0b8967911d4ad2152dbee000f025ea5cb575da5ecb"}, + {file = "clickhouse_driver-0.2.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55a48019b79181ae1ca90e980e74c5d413c3f8829f6744e2b056646c2d435a1a"}, + {file = "clickhouse_driver-0.2.7-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:507463c9157240fd7c3246781e8c30df8db3c80bf68925b36ff3ad4a80c4b924"}, + {file = "clickhouse_driver-0.2.7-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1e2d8d2295ee9e0cfab8ad77cb635a05da2160334b4f16ed8c3d00fbf39a2343"}, + {file = "clickhouse_driver-0.2.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e38c44546dcdb956b5ab0944cb3d51e8c98f816e75bab1a2254c478865bc6e7b"}, + {file = 
"clickhouse_driver-0.2.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e6690a2bdd9e7531fe50b53193279f8b35cbcd5c5ee36c0fcc112518a7d24f16"}, + {file = "clickhouse_driver-0.2.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bc6b4ba0a6467fd09021aa1d87a44fb4589600d61b010fca41e0dfffd0dee322"}, + {file = "clickhouse_driver-0.2.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:254bbd400eb87ff547a08755bc714f712e11f7a6d3ebbbb7aaa1dd454fb16d44"}, + {file = "clickhouse_driver-0.2.7-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7bbbe3f8b87fc1489bc15fa9c88cc9fac9d4d7d683d076f058c2c83e6ee422fd"}, + {file = "clickhouse_driver-0.2.7-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:745e5b18f0957d932151527f1523d0e516c199de8c589638e5f55ab2559886f3"}, + {file = "clickhouse_driver-0.2.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:0fa0357fb5f26149e3df86a117d3678329b85d8827b78a5a09bbf224d8dd4541"}, + {file = "clickhouse_driver-0.2.7-cp312-cp312-win32.whl", hash = "sha256:ace652af7ca94ba3cb3a04a5c363e135dc5009f31d8201903e21db9d5daf2358"}, + {file = "clickhouse_driver-0.2.7-cp312-cp312-win_amd64.whl", hash = "sha256:c0ba68489544df89e4138a14b0ec3e1e5eb102d5d3283a91d9b837c420c0ab97"}, + {file = "clickhouse_driver-0.2.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:66267e4ba21fa66c97ce784a5de2202d3b7d4db3e50bfcdde92830a68f6fae30"}, + {file = "clickhouse_driver-0.2.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6cf55c285b75c178487407721baef4980b3c6515c9c0c1a6c1ea8b001afe658e"}, + {file = "clickhouse_driver-0.2.7-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:deeb66bb56490db2157f199c6d9aa2c53f046677be430cc834fc1e74eec6e654"}, + {file = "clickhouse_driver-0.2.7-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dfe5b4020939abeeb407b4eead598c954b1573d2d2b4f174f793b196d378b9d9"}, + {file = "clickhouse_driver-0.2.7-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84d39506b5f8d86a1195ebde1c66aba168f34ebce6ebd828888f0625cac54774"}, + {file = "clickhouse_driver-0.2.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f93a27db2dcbbd3ecad36e8df4395d047cb7410e2dc69f6d037674e15442f4ee"}, + {file = "clickhouse_driver-0.2.7-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ebc29e501e47ecbfd44c89c0e5c87b2a722049d38b9e93fdd4bea510a82e16ac"}, + {file = "clickhouse_driver-0.2.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f9cc8c186fea09a94d89e5c9c4e8d05ec3a80e2f6d25673c48efec8117a13cfc"}, + {file = "clickhouse_driver-0.2.7-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:0757dfde5410c42230b24825ea3ab904a78160520e5ceb953482e133e368733b"}, + {file = "clickhouse_driver-0.2.7-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c9f88818cf411f928c29ba295c677cd95773bd256b8490f5655fb489e0c6658c"}, + {file = "clickhouse_driver-0.2.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e19952f158ebe274c65ffeb294ba378d75048a48f31b77573948d606bed019d5"}, + {file = "clickhouse_driver-0.2.7-cp37-cp37m-win32.whl", hash = "sha256:008b1f32c7c68564de8051482b72a5289b6933bca9d9b1ad1474dd448d6768ba"}, + {file = "clickhouse_driver-0.2.7-cp37-cp37m-win_amd64.whl", hash = "sha256:622933cc9834c39f03de5d43a12f13fc7133d31d6d2597e67866d4a549ca9e60"}, + {file = "clickhouse_driver-0.2.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:92540581e5b5f36d915f14d05c30244870fb123c74b38c645fa47663053c5471"}, + {file = "clickhouse_driver-0.2.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:02dfadc6111b64e01c20b8c11266cab97d4f06685a392a183af437f2f1afb990"}, + {file = "clickhouse_driver-0.2.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3ca17fece86fe85d97705024bec881978271931b3d00db273c9d63244f7d606"}, + {file = "clickhouse_driver-0.2.7-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:76474f1315ca3ab484ae28ad085b8f756c8b9a755882f93912b2149290482033"}, + {file = "clickhouse_driver-0.2.7-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f5c0ff12368b34aaf58dd948b0819e5b54d261911de334d3f048328dc9354013"}, + {file = "clickhouse_driver-0.2.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cd441b17294e90e313b08fabf84fcc782c191d2b9b2a924f163928202db6fcc"}, + {file = "clickhouse_driver-0.2.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62aa158f61d7d84c58e8cd75b3b8340b28607e5a70132395078f578d518aaae3"}, + {file = "clickhouse_driver-0.2.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:bcb2a39a1fef8bf1b581f06125c2a84a5b92c939b079d1a95126e3207b05dc77"}, + {file = "clickhouse_driver-0.2.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1f29cc641a65e89a51a15f6d195f565ad2761d1bd653408c6b4046c987c5fb99"}, + {file = "clickhouse_driver-0.2.7-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ac1a43690696bda46c9a23fc6fd79b6fe22d428a18e880bdbdf5e6aeb31008c5"}, + {file = "clickhouse_driver-0.2.7-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:1dd5ea4584c42f85d96ddfa7d07da2abb35a797c45e4d3a66ace149ee4977cad"}, + {file = "clickhouse_driver-0.2.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a736c0af858a3c83af03848b18754ab18dc594cc7f3bf6be0b1fac682def182c"}, + {file = "clickhouse_driver-0.2.7-cp38-cp38-win32.whl", hash = "sha256:6cb8ca47f5818c1bc5814b9ff775e383f3c50059b1fd28a02cb9be1b666929f8"}, + {file = "clickhouse_driver-0.2.7-cp38-cp38-win_amd64.whl", hash = "sha256:a90e7dc92985669a5e6569356bb3028d9d475f95006d4487cb0789aa53f9489c"}, + {file = "clickhouse_driver-0.2.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:04b77cd6c583da9135db4a62c5a7999ae248c2dbfc0cb8e8a3d8a853b1fbfa11"}, + {file = "clickhouse_driver-0.2.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c7671f8c0e8960d766b2e0eaefcae3088fccdd3920e9cd3dee8e344cfd0a6929"}, + {file = "clickhouse_driver-0.2.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:502d7cd28522b95a399e993ffd48487e8c12c50ce2d4e89b77b938f945304405"}, + {file = "clickhouse_driver-0.2.7-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:969739279f4010e7b5b6b2c9d2ab56a463aed11fdaed5e02424c1b3915f144f8"}, + {file = "clickhouse_driver-0.2.7-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ed34b60f741eeb02407ea72180d77cbfc368c1be6fc2f2ff8319d1856ce67e10"}, + {file = "clickhouse_driver-0.2.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a667b48927f4420eb8c03fa33369edfbdf359a788897a01ac945263a2a611461"}, + {file = "clickhouse_driver-0.2.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f93aa3a90f3847872d7464ec9076482b2e812c4e7d61682daedffdf3471be00"}, + {file = "clickhouse_driver-0.2.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:190890667215691fdf2155c3b233b39146054ab1cd854c7d91221e6ed633d71e"}, + {file = "clickhouse_driver-0.2.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ff280aeac5e96c764cd31ba1077c95601337b9a97fb0b9ed4d24c64431f2c322"}, + {file = "clickhouse_driver-0.2.7-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:01e63e35d2ab55b8eb48facf6e951968c80d27ee6703aa6c91c73d9d0a4d0efe"}, + {file = "clickhouse_driver-0.2.7-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:a29fb24b910dafc8c11ba882797d13ec0323a97dce80a57673116fa893d1b669"}, + {file = "clickhouse_driver-0.2.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5f229a7853fc767e63143ea69889d49f6fd5623adc2f7b0f7eb360117d7e91a5"}, + {file = "clickhouse_driver-0.2.7-cp39-cp39-win32.whl", hash = "sha256:b7f34ad2ed509f48f8ed1f9b96e89765173a7b35d286c7350aa85934a11c0f49"}, + {file = "clickhouse_driver-0.2.7-cp39-cp39-win_amd64.whl", hash = "sha256:78b166597afbe490cc0cdac44fed8c8b81668f87125601dda17b154f237eef5d"}, + {file = "clickhouse_driver-0.2.7-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:16ab64beb8d079cb9b3200539539a35168f524eedf890c9acefb719e25bdc96e"}, + {file = "clickhouse_driver-0.2.7-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03e28fd50fc7c54874bf8e638a2ea87f73ae35bfbbf90123fdb395f38d62f159"}, + {file = "clickhouse_driver-0.2.7-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0677b8350acd8d186b6acd0026b62dd262d6fee428a5fa3ad9561908d4b02c39"}, + {file = "clickhouse_driver-0.2.7-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a2f3c9e2182809131701bb28a606dec90525c7ab20490714714a4b3eb015454b"}, + {file = "clickhouse_driver-0.2.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:e03a1a1b30cc58c9bd2cbe25bf5e40b1f1d16d52d44ddefb3af50435d1ed613c"}, + {file = "clickhouse_driver-0.2.7-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a1be8081306a4beb12444ed8e3208e1eb6c01ed207c471b33009c13504c88139"}, + {file = "clickhouse_driver-0.2.7-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:933b40722cbca9b1123a5bb2fb4bafafd234deae0f3481125cb6b6fa1d39aa84"}, + {file = "clickhouse_driver-0.2.7-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3054b5022f9bf15a5f4663a7cd190f466e70a2d7b8d45429d8742c515b556c10"}, + {file = "clickhouse_driver-0.2.7-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:61744760ee046c9a268cb801ca21bfe44c4873db9901a7cd0f3ca8830205feff"}, + {file = "clickhouse_driver-0.2.7-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:5e28427e05a72e7a4c3672e36703a2d80107ee0b3ab537e3380d726c96b07821"}, + {file = "clickhouse_driver-0.2.7-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c483f5ec836ae87803478f2a7b9daf15343078edd6a8be7364dd9db64905bbd0"}, + {file = "clickhouse_driver-0.2.7-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28220b794874e68bc2f06dbfff5748f1c5a3236922f59e127abd58d44ae20a3f"}, + {file = "clickhouse_driver-0.2.7-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c09877b59b34d5b3043ad70ec31543173cac8b64b4a8afaa89416b22fb28da5"}, + {file = "clickhouse_driver-0.2.7-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3580f78db27119f7380627873214ae1342066f1ecb35700c1d7bf418dd70ae73"}, + {file = 
"clickhouse_driver-0.2.7-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:0842ac1b2f7a9ca46dac2027849b241bccd8eb8ff1c59cb0a5874042b267b733"}, + {file = "clickhouse_driver-0.2.7-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7a3fb585e2d3514196258a4a3b0267510c03477f3c2380239ade4c056ba689a7"}, + {file = "clickhouse_driver-0.2.7-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48ea25287566d45efbaee0857ad25e8b33ffd7fd73e89424d79fe7f532962915"}, + {file = "clickhouse_driver-0.2.7-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee4a4935667b59b4816a5ca77300f5dbe5a7416860551d17376426b8fefc1175"}, + {file = "clickhouse_driver-0.2.7-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:358058cfceea9b43c4af9de81842563746f16984b34525a15b41eacf8fc2bed2"}, + {file = "clickhouse_driver-0.2.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:ae760fb843dec0b5c398536ca8dfaf243f494ba8fc68132ae1bd62004b0c396a"}, +] + +[package.dependencies] +pytz = "*" +tzlocal = "*" + +[package.extras] +lz4 = ["clickhouse-cityhash (>=1.0.2.1)", "lz4", "lz4 (<=3.0.1)"] +numpy = ["numpy (>=1.12.0)", "pandas (>=0.24.0)"] +zstd = ["clickhouse-cityhash (>=1.0.2.1)", "zstd"] + [[package]] name = "colorama" version = "0.4.6" @@ -3357,6 +3568,114 @@ files = [ {file = "google_re2-1.1-1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9c6c9f64b9724ec38da8e514f404ac64e9a6a5e8b1d7031c2dadd05c1f4c16fd"}, {file = "google_re2-1.1-1-cp39-cp39-win32.whl", hash = "sha256:d1b751b9ab9f8e2ab2a36d72b909281ce65f328c9115a1685acae1a2d1afd7a4"}, {file = "google_re2-1.1-1-cp39-cp39-win_amd64.whl", hash = "sha256:ac775c75cec7069351d201da4e0fb0cae4c1c5ebecd08fa34e1be89740c1d80b"}, + {file = "google_re2-1.1-2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5eaefe4705b75ca5f78178a50104b689e9282f868e12f119b26b4cffc0c7ee6e"}, + {file = "google_re2-1.1-2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:e35f2c8aabfaaa4ce6420b3cae86c0c29042b1b4f9937254347e9b985694a171"}, + {file = "google_re2-1.1-2-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:35fd189cbaaaa39c9a6a8a00164c8d9c709bacd0c231c694936879609beff516"}, + {file = "google_re2-1.1-2-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:60475d222cebd066c80414831c8a42aa2449aab252084102ee05440896586e6a"}, + {file = "google_re2-1.1-2-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:871cb85b9b0e1784c983b5c148156b3c5314cb29ca70432dff0d163c5c08d7e5"}, + {file = "google_re2-1.1-2-cp310-cp310-macosx_13_0_x86_64.whl", hash = "sha256:94f4e66e34bdb8de91ec6cdf20ba4fa9fea1dfdcfb77ff1f59700d01a0243664"}, + {file = "google_re2-1.1-2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1563577e2b720d267c4cffacc0f6a2b5c8480ea966ebdb1844fbea6602c7496f"}, + {file = "google_re2-1.1-2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:49b7964532a801b96062d78c0222d155873968f823a546a3dbe63d73f25bb56f"}, + {file = "google_re2-1.1-2-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2362fd70eb639a75fd0187d28b4ba7b20b3088833d8ad7ffd8693d0ba159e1c2"}, + {file = "google_re2-1.1-2-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:86b80719636a4e21391e20a9adf18173ee6ae2ec956726fe2ff587417b5e8ba6"}, + {file = "google_re2-1.1-2-cp310-cp310-win32.whl", hash = "sha256:5456fba09df951fe8d1714474ed1ecda102a68ddffab0113e6c117d2e64e6f2b"}, + {file = 
"google_re2-1.1-2-cp310-cp310-win_amd64.whl", hash = "sha256:2ac6936a3a60d8d9de9563e90227b3aea27068f597274ca192c999a12d8baa8f"}, + {file = "google_re2-1.1-2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d5a87b436028ec9b0f02fe19d4cbc19ef30441085cdfcdf1cce8fbe5c4bd5e9a"}, + {file = "google_re2-1.1-2-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:fc0d4163de9ed2155a77e7a2d59d94c348a6bbab3cff88922fab9e0d3d24faec"}, + {file = "google_re2-1.1-2-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:48b12d953bc796736e7831d67b36892fb6419a4cc44cb16521fe291e594bfe23"}, + {file = "google_re2-1.1-2-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:62c780c927cff98c1538439f0ff616f48a9b2e8837c676f53170d8ae5b9e83cb"}, + {file = "google_re2-1.1-2-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:04b2aefd768aa4edeef8b273327806c9cb0b82e90ff52eacf5d11003ac7a0db2"}, + {file = "google_re2-1.1-2-cp311-cp311-macosx_13_0_x86_64.whl", hash = "sha256:9c90175992346519ee7546d9af9a64541c05b6b70346b0ddc54a48aa0d3b6554"}, + {file = "google_re2-1.1-2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:22ad9ad9d125249d6386a2e80efb9de7af8260b703b6be7fa0ab069c1cf56ced"}, + {file = "google_re2-1.1-2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f70971f6ffe5254e476e71d449089917f50ebf9cf60f9cec80975ab1693777e2"}, + {file = "google_re2-1.1-2-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f267499529e64a4abed24c588f355ebe4700189d434d84a7367725f5a186e48d"}, + {file = "google_re2-1.1-2-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b632eff5e4cd44545a9c0e52f2e1becd55831e25f4dd4e0d7ec8ee6ca50858c1"}, + {file = "google_re2-1.1-2-cp311-cp311-win32.whl", hash = "sha256:a42c733036e8f242ee4e5f0e27153ad4ca44ced9e4ce82f3972938ddee528db0"}, + {file = "google_re2-1.1-2-cp311-cp311-win_amd64.whl", hash = "sha256:64f8eed4ca96905d99b5286b3d14b5ca4f6a025ff3c1351626a7df2f93ad1ddd"}, + {file = "google_re2-1.1-2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5541efcca5b5faf7e0d882334a04fa479bad4e7433f94870f46272eec0672c4a"}, + {file = "google_re2-1.1-2-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:92309af35b6eb2d3b3dc57045cdd83a76370958ab3e0edd2cc4638f6d23f5b32"}, + {file = "google_re2-1.1-2-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:197cd9bcaba96d18c5bf84d0c32fca7a26c234ea83b1d3083366f4392cb99f78"}, + {file = "google_re2-1.1-2-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:1b896f171d29b541256cf26e10dccc9103ac1894683914ed88828ca6facf8dca"}, + {file = "google_re2-1.1-2-cp38-cp38-macosx_13_0_arm64.whl", hash = "sha256:e022d3239b945014e916ca7120fee659b246ec26c301f9e0542f1a19b38a8744"}, + {file = "google_re2-1.1-2-cp38-cp38-macosx_13_0_x86_64.whl", hash = "sha256:2c73f8a9440873b68bee1198094377501065e85aaf6fcc0d2512c7589ffa06ca"}, + {file = "google_re2-1.1-2-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:901d86555bd7725506d651afaba7d71cd4abd13260aed6cfd7c641a45f76d4f6"}, + {file = "google_re2-1.1-2-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ce4710ff636701cfb56eb91c19b775d53b03749a23b7d2a5071bbbf4342a9067"}, + {file = "google_re2-1.1-2-cp38-cp38-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:76a20e5ebdf5bc5d430530197e42a2eeb562f729d3a3fb51f39168283d676e66"}, + {file = "google_re2-1.1-2-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:77c9f4d4bb1c8de9d2642d3c4b8b615858ba764df025b3b4f1310266f8def269"}, + {file = 
"google_re2-1.1-2-cp38-cp38-win32.whl", hash = "sha256:94bd60785bf37ef130a1613738e3c39465a67eae3f3be44bb918540d39b68da3"}, + {file = "google_re2-1.1-2-cp38-cp38-win_amd64.whl", hash = "sha256:59efeb77c0dcdbe37794c61f29c5b1f34bc06e8ec309a111ccdd29d380644d70"}, + {file = "google_re2-1.1-2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:221e38c27e1dd9ccb8e911e9c7aed6439f68ce81e7bb74001076830b0d6e931d"}, + {file = "google_re2-1.1-2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:d9145879e6c2e1b814445300b31f88a675e1f06c57564670d95a1442e8370c27"}, + {file = "google_re2-1.1-2-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:c8a12f0740e2a52826bdbf95569a4b0abdf413b4012fa71e94ad25dd4715c6e5"}, + {file = "google_re2-1.1-2-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:9c9998f71466f4db7bda752aa7c348b2881ff688e361108fe500caad1d8b9cb2"}, + {file = "google_re2-1.1-2-cp39-cp39-macosx_13_0_arm64.whl", hash = "sha256:0c39f69b702005963a3d3bf78743e1733ad73efd7e6e8465d76e3009e4694ceb"}, + {file = "google_re2-1.1-2-cp39-cp39-macosx_13_0_x86_64.whl", hash = "sha256:6d0ce762dee8d6617d0b1788a9653e805e83a23046c441d0ea65f1e27bf84114"}, + {file = "google_re2-1.1-2-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ecf3619d98c9b4a7844ab52552ad32597cdbc9a5bdbc7e3435391c653600d1e2"}, + {file = "google_re2-1.1-2-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9a1426a8cbd1fa004974574708d496005bd379310c4b1c7012be4bc75efde7a8"}, + {file = "google_re2-1.1-2-cp39-cp39-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a1a30626ba48b4070f3eab272d860ef1952e710b088792c4d68dddb155be6bfc"}, + {file = "google_re2-1.1-2-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1b9c1ffcfbc3095b6ff601ec2d2bf662988f6ea6763bc1c9d52bec55881f8fde"}, + {file = "google_re2-1.1-2-cp39-cp39-win32.whl", hash = "sha256:32ecf995a252c0548404c1065ba4b36f1e524f1f4a86b6367a1a6c3da3801e30"}, + {file = "google_re2-1.1-2-cp39-cp39-win_amd64.whl", hash = "sha256:e7865410f3b112a3609739283ec3f4f6f25aae827ff59c6bfdf806fd394d753e"}, + {file = "google_re2-1.1-3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3b21f83f0a201009c56f06fcc7294a33555ede97130e8a91b3f4cae01aed1d73"}, + {file = "google_re2-1.1-3-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:b38194b91354a38db1f86f25d09cdc6ac85d63aee4c67b43da3048ce637adf45"}, + {file = "google_re2-1.1-3-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:e7da3da8d6b5a18d6c3b61b11cc5b66b8564eaedce99d2312b15b6487730fc76"}, + {file = "google_re2-1.1-3-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:aeca656fb10d8638f245331aabab59c9e7e051ca974b366dd79e6a9efb12e401"}, + {file = "google_re2-1.1-3-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:2069d6dc94f5fa14a159bf99cad2f11e9c0f8ec3b7f44a4dde9e59afe5d1c786"}, + {file = "google_re2-1.1-3-cp310-cp310-macosx_13_0_x86_64.whl", hash = "sha256:2319a39305a4931cb5251451f2582713418a19bef2af7adf9e2a7a0edd939b99"}, + {file = "google_re2-1.1-3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:eb98fc131699756c6d86246f670a5e1c1cc1ba85413c425ad344cb30479b246c"}, + {file = "google_re2-1.1-3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a6e038986d8ffe4e269f8532f03009f229d1f6018d4ac0dabc8aff876338f6e0"}, + {file = "google_re2-1.1-3-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8618343ee658310e0f53bf586fab7409de43ce82bf8d9f7eb119536adc9783fd"}, + {file = 
"google_re2-1.1-3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d8140ca861cfe00602319cefe2c7b8737b379eb07fb328b51dc44584f47a2718"}, + {file = "google_re2-1.1-3-cp310-cp310-win32.whl", hash = "sha256:41f439c5c54e8a3a0a1fa2dbd1e809d3f643f862df7b16dd790f36a1238a272e"}, + {file = "google_re2-1.1-3-cp310-cp310-win_amd64.whl", hash = "sha256:fe20e97a33176d96d3e4b5b401de35182b9505823abea51425ec011f53ef5e56"}, + {file = "google_re2-1.1-3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7c39ff52b1765db039f690ee5b7b23919d8535aae94db7996079fbde0098c4d7"}, + {file = "google_re2-1.1-3-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:5420be674fd164041639ba4c825450f3d4bd635572acdde16b3dcd697f8aa3ef"}, + {file = "google_re2-1.1-3-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:ff53881cf1ce040f102a42d39db93c3f835f522337ae9c79839a842f26d97733"}, + {file = "google_re2-1.1-3-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:8d04600b0b53523118df2e413a71417c408f20dee640bf07dfab601c96a18a77"}, + {file = "google_re2-1.1-3-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:c4835d4849faa34a7fa1074098d81c420ed6c0707a3772482b02ce14f2a7c007"}, + {file = "google_re2-1.1-3-cp311-cp311-macosx_13_0_x86_64.whl", hash = "sha256:3309a9b81251d35fee15974d0ae0581a9a375266deeafdc3a3ac0d172a742357"}, + {file = "google_re2-1.1-3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:e2b51cafee7e0bc72d0a4a454547bd8f257cde412ac9f1a2dc46a203b5e42cf4"}, + {file = "google_re2-1.1-3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:83f5f1cb52f832c2297d271ee8c56cf5e9053448162e5d2223d513f729bad908"}, + {file = "google_re2-1.1-3-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:55865a1ace92be3f7953b2e2b38b901d8074a367aa491daee43260a53a7fc6f0"}, + {file = "google_re2-1.1-3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cec2167dd142e583e98c783bd0d28b8cf5a9cdbe1f7407ba4163fe3ccb613cb9"}, + {file = "google_re2-1.1-3-cp311-cp311-win32.whl", hash = "sha256:a0bc1fe96849e4eb8b726d0bba493f5b989372243b32fe20729cace02e5a214d"}, + {file = "google_re2-1.1-3-cp311-cp311-win_amd64.whl", hash = "sha256:e6310a156db96fc5957cb007dd2feb18476898654530683897469447df73a7cd"}, + {file = "google_re2-1.1-3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8e63cd10ea006088b320e8c5d308da1f6c87aa95138a71c60dd7ca1c8e91927e"}, + {file = "google_re2-1.1-3-cp312-cp312-macosx_11_0_x86_64.whl", hash = "sha256:12b566830a334178733a85e416b1e0507dbc0ceb322827616fe51ef56c5154f1"}, + {file = "google_re2-1.1-3-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:442e18c9d46b225c1496919c16eafe8f8d9bb4091b00b4d3440da03c55bbf4ed"}, + {file = "google_re2-1.1-3-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:c54c00263a9c39b2dacd93e9636319af51e3cf885c080b9680a9631708326460"}, + {file = "google_re2-1.1-3-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:15a3caeeb327bc22e0c9f95eb76890fec8874cacccd2b01ff5c080ab4819bbec"}, + {file = "google_re2-1.1-3-cp312-cp312-macosx_13_0_x86_64.whl", hash = "sha256:59ec0d2cced77f715d41f6eafd901f6b15c11e28ba25fe0effdc1de554d78e75"}, + {file = "google_re2-1.1-3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:185bf0e3441aed3840590f8e42f916e2920d235eb14df2cbc2049526803d3e71"}, + {file = "google_re2-1.1-3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:586d3f2014eea5be14d8de53374d9b79fa99689160e00efa64b5fe93af326087"}, + {file = 
"google_re2-1.1-3-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cc2575082de4ffd234d9607f3ae67ca22b15a1a88793240e2045f3b3a36a5795"}, + {file = "google_re2-1.1-3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:59c5ad438eddb3630def394456091284d7bbc5b89351987f94f3792d296d1f96"}, + {file = "google_re2-1.1-3-cp312-cp312-win32.whl", hash = "sha256:5b9878c53f2bf16f75bf71d4ddd57f6611351408d5821040e91c53ebdf82c373"}, + {file = "google_re2-1.1-3-cp312-cp312-win_amd64.whl", hash = "sha256:4fdecfeb213110d0a85bad335a8e7cdb59fea7de81a4fe659233f487171980f9"}, + {file = "google_re2-1.1-3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2dd87bacab32b709c28d0145fe75a956b6a39e28f0726d867375dba5721c76c1"}, + {file = "google_re2-1.1-3-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:55d24c61fe35dddc1bb484593a57c9f60f9e66d7f31f091ef9608ed0b6dde79f"}, + {file = "google_re2-1.1-3-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:a0cf1180d908622df648c26b0cd09281f92129805ccc56a39227fdbfeab95cb4"}, + {file = "google_re2-1.1-3-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:09586f07f3f88d432265c75976da1c619ab7192cd7ebdf53f4ae0776c19e4b56"}, + {file = "google_re2-1.1-3-cp38-cp38-macosx_13_0_arm64.whl", hash = "sha256:539f1b053402203576e919a06749198da4ae415931ee28948a1898131ae932ce"}, + {file = "google_re2-1.1-3-cp38-cp38-macosx_13_0_x86_64.whl", hash = "sha256:abf0bcb5365b0e27a5a23f3da403dffdbbac2c0e3a3f1535a8b10cc121b5d5fb"}, + {file = "google_re2-1.1-3-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:19c83e5bbed7958213eeac3aa71c506525ce54faf03e07d0b96cd0a764890511"}, + {file = "google_re2-1.1-3-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3348e77330ff672dc44ec01894fa5d93c409a532b6d688feac55e714e9059920"}, + {file = "google_re2-1.1-3-cp38-cp38-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:06b63edb57c5ce5a13eabfd71155e346b9477dc8906dec7c580d4f70c16a7e0d"}, + {file = "google_re2-1.1-3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:12fe57ba2914092b83338d61d8def9ebd5a2bd0fd8679eceb5d4c2748105d5c0"}, + {file = "google_re2-1.1-3-cp38-cp38-win32.whl", hash = "sha256:80796e08d24e606e675019fe8de4eb5c94bb765be13c384f2695247d54a6df75"}, + {file = "google_re2-1.1-3-cp38-cp38-win_amd64.whl", hash = "sha256:3c2257dedfe7cc5deb6791e563af9e071a9d414dad89e37ac7ad22f91be171a9"}, + {file = "google_re2-1.1-3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:43a0cd77c87c894f28969ac622f94b2e6d1571261dfdd785026848a25cfdc9b9"}, + {file = "google_re2-1.1-3-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:1038990b77fd66f279bd66a0832b67435ea925e15bb59eafc7b60fdec812b616"}, + {file = "google_re2-1.1-3-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:fb5dda6875d18dd45f0f24ebced6d1f7388867c8fb04a235d1deab7ea479ce38"}, + {file = "google_re2-1.1-3-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:bb1d164965c6d57a351b421d2f77c051403766a8b75aaa602324ee2451fff77f"}, + {file = "google_re2-1.1-3-cp39-cp39-macosx_13_0_arm64.whl", hash = "sha256:a072ebfa495051d07ffecbf6ce21eb84793568d5c3c678c00ed8ff6b8066ab31"}, + {file = "google_re2-1.1-3-cp39-cp39-macosx_13_0_x86_64.whl", hash = "sha256:4eb66c8398c8a510adc97978d944b3b29c91181237218841ea1a91dc39ec0e54"}, + {file = "google_re2-1.1-3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:f7c8b57b1f559553248d1757b7fa5b2e0cc845666738d155dff1987c2618264e"}, + {file = 
"google_re2-1.1-3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9162f6aa4f25453c682eb176f21b8e2f40205be9f667e98a54b3e1ff10d6ee75"}, + {file = "google_re2-1.1-3-cp39-cp39-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a2d65ddf67fd7bf94705626871d463057d3d9a3538d41022f95b9d8f01df36e1"}, + {file = "google_re2-1.1-3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d140c7b9395b4d1e654127aa1c99bcc603ed01000b7bc7e28c52562f1894ec12"}, + {file = "google_re2-1.1-3-cp39-cp39-win32.whl", hash = "sha256:80c5fc200f64b2d903eeb07b8d6cefc620a872a0240c7caaa9aca05b20f5568f"}, + {file = "google_re2-1.1-3-cp39-cp39-win_amd64.whl", hash = "sha256:9eb6dbcee9b5dc4069bbc0634f2eb039ca524a14bed5868fdf6560aaafcbca06"}, ] [[package]] @@ -4223,10 +4542,13 @@ files = [ {file = "lxml-4.9.3-cp27-cp27m-macosx_11_0_x86_64.whl", hash = "sha256:b0a545b46b526d418eb91754565ba5b63b1c0b12f9bd2f808c852d9b4b2f9b5c"}, {file = "lxml-4.9.3-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:075b731ddd9e7f68ad24c635374211376aa05a281673ede86cbe1d1b3455279d"}, {file = "lxml-4.9.3-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1e224d5755dba2f4a9498e150c43792392ac9b5380aa1b845f98a1618c94eeef"}, + {file = "lxml-4.9.3-cp27-cp27m-win32.whl", hash = "sha256:2c74524e179f2ad6d2a4f7caf70e2d96639c0954c943ad601a9e146c76408ed7"}, + {file = "lxml-4.9.3-cp27-cp27m-win_amd64.whl", hash = "sha256:4f1026bc732b6a7f96369f7bfe1a4f2290fb34dce00d8644bc3036fb351a4ca1"}, {file = "lxml-4.9.3-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c0781a98ff5e6586926293e59480b64ddd46282953203c76ae15dbbbf302e8bb"}, {file = "lxml-4.9.3-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:cef2502e7e8a96fe5ad686d60b49e1ab03e438bd9123987994528febd569868e"}, {file = "lxml-4.9.3-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:b86164d2cff4d3aaa1f04a14685cbc072efd0b4f99ca5708b2ad1b9b5988a991"}, {file = "lxml-4.9.3-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:42871176e7896d5d45138f6d28751053c711ed4d48d8e30b498da155af39aebd"}, + {file = "lxml-4.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:ae8b9c6deb1e634ba4f1930eb67ef6e6bf6a44b6eb5ad605642b2d6d5ed9ce3c"}, {file = "lxml-4.9.3-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:411007c0d88188d9f621b11d252cce90c4a2d1a49db6c068e3c16422f306eab8"}, {file = "lxml-4.9.3-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:cd47b4a0d41d2afa3e58e5bf1f62069255aa2fd6ff5ee41604418ca925911d76"}, {file = "lxml-4.9.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0e2cb47860da1f7e9a5256254b74ae331687b9672dfa780eed355c4c9c3dbd23"}, @@ -4235,6 +4557,7 @@ files = [ {file = "lxml-4.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:97047f0d25cd4bcae81f9ec9dc290ca3e15927c192df17331b53bebe0e3ff96d"}, {file = "lxml-4.9.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:1f447ea5429b54f9582d4b955f5f1985f278ce5cf169f72eea8afd9502973dd5"}, {file = "lxml-4.9.3-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:57d6ba0ca2b0c462f339640d22882acc711de224d769edf29962b09f77129cbf"}, + {file = "lxml-4.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:9767e79108424fb6c3edf8f81e6730666a50feb01a328f4a016464a5893f835a"}, {file = "lxml-4.9.3-cp311-cp311-manylinux_2_28_aarch64.whl", hash = 
"sha256:71c52db65e4b56b8ddc5bb89fb2e66c558ed9d1a74a45ceb7dcb20c191c3df2f"}, {file = "lxml-4.9.3-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d73d8ecf8ecf10a3bd007f2192725a34bd62898e8da27eb9d32a58084f93962b"}, {file = "lxml-4.9.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0a3d3487f07c1d7f150894c238299934a2a074ef590b583103a45002035be120"}, @@ -4254,6 +4577,7 @@ files = [ {file = "lxml-4.9.3-cp36-cp36m-macosx_11_0_x86_64.whl", hash = "sha256:64f479d719dc9f4c813ad9bb6b28f8390360660b73b2e4beb4cb0ae7104f1c12"}, {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:dd708cf4ee4408cf46a48b108fb9427bfa00b9b85812a9262b5c668af2533ea5"}, {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c31c7462abdf8f2ac0577d9f05279727e698f97ecbb02f17939ea99ae8daa98"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e3cd95e10c2610c360154afdc2f1480aea394f4a4f1ea0a5eacce49640c9b190"}, {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:4930be26af26ac545c3dffb662521d4e6268352866956672231887d18f0eaab2"}, {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4aec80cde9197340bc353d2768e2a75f5f60bacda2bab72ab1dc499589b3878c"}, {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:14e019fd83b831b2e61baed40cab76222139926b1fb5ed0e79225bc0cae14584"}, @@ -4263,6 +4587,7 @@ files = [ {file = "lxml-4.9.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bef4e656f7d98aaa3486d2627e7d2df1157d7e88e7efd43a65aa5dd4714916cf"}, {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:46f409a2d60f634fe550f7133ed30ad5321ae2e6630f13657fb9479506b00601"}, {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:4c28a9144688aef80d6ea666c809b4b0e50010a2aca784c97f5e6bf143d9f129"}, + {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:141f1d1a9b663c679dc524af3ea1773e618907e96075262726c7612c02b149a4"}, {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:53ace1c1fd5a74ef662f844a0413446c0629d151055340e9893da958a374f70d"}, {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:17a753023436a18e27dd7769e798ce302963c236bc4114ceee5b25c18c52c693"}, {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7d298a1bd60c067ea75d9f684f5f3992c9d6766fadbc0bcedd39750bf344c2f4"}, @@ -4272,6 +4597,7 @@ files = [ {file = "lxml-4.9.3-cp37-cp37m-win_amd64.whl", hash = "sha256:120fa9349a24c7043854c53cae8cec227e1f79195a7493e09e0c12e29f918e52"}, {file = "lxml-4.9.3-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4d2d1edbca80b510443f51afd8496be95529db04a509bc8faee49c7b0fb6d2cc"}, {file = "lxml-4.9.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8d7e43bd40f65f7d97ad8ef5c9b1778943d02f04febef12def25f7583d19baac"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:71d66ee82e7417828af6ecd7db817913cb0cf9d4e61aa0ac1fde0583d84358db"}, {file = "lxml-4.9.3-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:6fc3c450eaa0b56f815c7b62f2b7fba7266c4779adcf1cece9e6deb1de7305ce"}, {file = 
"lxml-4.9.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:65299ea57d82fb91c7f019300d24050c4ddeb7c5a190e076b5f48a2b43d19c42"}, {file = "lxml-4.9.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:eadfbbbfb41b44034a4c757fd5d70baccd43296fb894dba0295606a7cf3124aa"}, @@ -4281,6 +4607,7 @@ files = [ {file = "lxml-4.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:92af161ecbdb2883c4593d5ed4815ea71b31fafd7fd05789b23100d081ecac96"}, {file = "lxml-4.9.3-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:9bb6ad405121241e99a86efff22d3ef469024ce22875a7ae045896ad23ba2340"}, {file = "lxml-4.9.3-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8ed74706b26ad100433da4b9d807eae371efaa266ffc3e9191ea436087a9d6a7"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fbf521479bcac1e25a663df882c46a641a9bff6b56dc8b0fafaebd2f66fb231b"}, {file = "lxml-4.9.3-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:303bf1edce6ced16bf67a18a1cf8339d0db79577eec5d9a6d4a80f0fb10aa2da"}, {file = "lxml-4.9.3-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:5515edd2a6d1a5a70bfcdee23b42ec33425e405c5b351478ab7dc9347228f96e"}, {file = "lxml-4.9.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:690dafd0b187ed38583a648076865d8c229661ed20e48f2335d68e2cf7dc829d"}, @@ -4291,13 +4618,16 @@ files = [ {file = "lxml-4.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:4dd9a263e845a72eacb60d12401e37c616438ea2e5442885f65082c276dfb2b2"}, {file = "lxml-4.9.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6689a3d7fd13dc687e9102a27e98ef33730ac4fe37795d5036d18b4d527abd35"}, {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:f6bdac493b949141b733c5345b6ba8f87a226029cbabc7e9e121a413e49441e0"}, + {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:05186a0f1346ae12553d66df1cfce6f251589fea3ad3da4f3ef4e34b2d58c6a3"}, {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c2006f5c8d28dee289f7020f721354362fa304acbaaf9745751ac4006650254b"}, {file = "lxml-4.9.3-pp38-pypy38_pp73-macosx_11_0_x86_64.whl", hash = "sha256:5c245b783db29c4e4fbbbfc9c5a78be496c9fea25517f90606aa1f6b2b3d5f7b"}, {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4fb960a632a49f2f089d522f70496640fdf1218f1243889da3822e0a9f5f3ba7"}, + {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:50670615eaf97227d5dc60de2dc99fb134a7130d310d783314e7724bf163f75d"}, {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9719fe17307a9e814580af1f5c6e05ca593b12fb7e44fe62450a5384dbf61b4b"}, {file = "lxml-4.9.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:3331bece23c9ee066e0fb3f96c61322b9e0f54d775fccefff4c38ca488de283a"}, {file = "lxml-4.9.3-pp39-pypy39_pp73-macosx_11_0_x86_64.whl", hash = "sha256:ed667f49b11360951e201453fc3967344d0d0263aa415e1619e85ae7fd17b4e0"}, {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8b77946fd508cbf0fccd8e400a7f71d4ac0e1595812e66025bac475a8e811694"}, + {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = 
"sha256:e4da8ca0c0c0aea88fd46be8e44bd49716772358d648cce45fe387f7b92374a7"}, {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fe4bda6bd4340caa6e5cf95e73f8fea5c4bfc55763dd42f1b50a94c1b4a2fbd4"}, {file = "lxml-4.9.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:f3df3db1d336b9356dd3112eae5f5c2b8b377f3bc826848567f10bfddfee77e9"}, {file = "lxml-4.9.3.tar.gz", hash = "sha256:48628bd53a426c9eb9bc066a923acaa0878d1e86129fd5359aee99285f4eed9c"}, @@ -6434,6 +6764,7 @@ files = [ {file = "pymongo-4.6.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ab6bcc8e424e07c1d4ba6df96f7fb963bcb48f590b9456de9ebd03b88084fe8"}, {file = "pymongo-4.6.0-cp312-cp312-win32.whl", hash = "sha256:47aa128be2e66abd9d1a9b0437c62499d812d291f17b55185cb4aa33a5f710a4"}, {file = "pymongo-4.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:014e7049dd019a6663747ca7dae328943e14f7261f7c1381045dfc26a04fa330"}, + {file = "pymongo-4.6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e24025625bad66895b1bc3ae1647f48f0a92dd014108fb1be404c77f0b69ca67"}, {file = "pymongo-4.6.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:288c21ab9531b037f7efa4e467b33176bc73a0c27223c141b822ab4a0e66ff2a"}, {file = "pymongo-4.6.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:747c84f4e690fbe6999c90ac97246c95d31460d890510e4a3fa61b7d2b87aa34"}, {file = "pymongo-4.6.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:055f5c266e2767a88bb585d01137d9c7f778b0195d3dbf4a487ef0638be9b651"}, @@ -6870,7 +7201,6 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, @@ -8408,6 +8738,24 @@ files = [ {file = "tzdata-2023.3.tar.gz", hash = "sha256:11ef1e08e54acb0d4f95bdb1be05da659673de4acbd21bf9c69e94cc5e907a3a"}, ] +[[package]] +name = "tzlocal" +version = "5.2" +description = "tzinfo object for the local timezone" +optional = true +python-versions = ">=3.8" +files = [ + {file = "tzlocal-5.2-py3-none-any.whl", hash = "sha256:49816ef2fe65ea8ac19d19aa7a1ae0551c834303d5014c6d5a62e4cbda8047b8"}, + {file = "tzlocal-5.2.tar.gz", hash = "sha256:8d399205578f1a9342816409cc1e46a93ebd5755e39ea2d85334bea911bf0e6e"}, +] + +[package.dependencies] +"backports.zoneinfo" = {version = "*", markers = "python_version < \"3.9\""} +tzdata = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +devenv = ["check-manifest", "pytest (>=4.3)", "pytest-cov", "pytest-mock (>=3.3)", "zest.releaser"] + [[package]] name = "uc-micro-py" version = "1.0.2" @@ -8828,11 +9176,73 @@ files = [ docs = ["furo", 
"jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] +[[package]] +name = "zstandard" +version = "0.22.0" +description = "Zstandard bindings for Python" +optional = true +python-versions = ">=3.8" +files = [ + {file = "zstandard-0.22.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:275df437ab03f8c033b8a2c181e51716c32d831082d93ce48002a5227ec93019"}, + {file = "zstandard-0.22.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2ac9957bc6d2403c4772c890916bf181b2653640da98f32e04b96e4d6fb3252a"}, + {file = "zstandard-0.22.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe3390c538f12437b859d815040763abc728955a52ca6ff9c5d4ac707c4ad98e"}, + {file = "zstandard-0.22.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1958100b8a1cc3f27fa21071a55cb2ed32e9e5df4c3c6e661c193437f171cba2"}, + {file = "zstandard-0.22.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93e1856c8313bc688d5df069e106a4bc962eef3d13372020cc6e3ebf5e045202"}, + {file = "zstandard-0.22.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:1a90ba9a4c9c884bb876a14be2b1d216609385efb180393df40e5172e7ecf356"}, + {file = "zstandard-0.22.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3db41c5e49ef73641d5111554e1d1d3af106410a6c1fb52cf68912ba7a343a0d"}, + {file = "zstandard-0.22.0-cp310-cp310-win32.whl", hash = "sha256:d8593f8464fb64d58e8cb0b905b272d40184eac9a18d83cf8c10749c3eafcd7e"}, + {file = "zstandard-0.22.0-cp310-cp310-win_amd64.whl", hash = "sha256:f1a4b358947a65b94e2501ce3e078bbc929b039ede4679ddb0460829b12f7375"}, + {file = "zstandard-0.22.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:589402548251056878d2e7c8859286eb91bd841af117dbe4ab000e6450987e08"}, + {file = "zstandard-0.22.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a97079b955b00b732c6f280d5023e0eefe359045e8b83b08cf0333af9ec78f26"}, + {file = "zstandard-0.22.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:445b47bc32de69d990ad0f34da0e20f535914623d1e506e74d6bc5c9dc40bb09"}, + {file = "zstandard-0.22.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33591d59f4956c9812f8063eff2e2c0065bc02050837f152574069f5f9f17775"}, + {file = "zstandard-0.22.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:888196c9c8893a1e8ff5e89b8f894e7f4f0e64a5af4d8f3c410f0319128bb2f8"}, + {file = "zstandard-0.22.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:53866a9d8ab363271c9e80c7c2e9441814961d47f88c9bc3b248142c32141d94"}, + {file = "zstandard-0.22.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4ac59d5d6910b220141c1737b79d4a5aa9e57466e7469a012ed42ce2d3995e88"}, + {file = "zstandard-0.22.0-cp311-cp311-win32.whl", hash = "sha256:2b11ea433db22e720758cba584c9d661077121fcf60ab43351950ded20283440"}, + {file = "zstandard-0.22.0-cp311-cp311-win_amd64.whl", hash = "sha256:11f0d1aab9516a497137b41e3d3ed4bbf7b2ee2abc79e5c8b010ad286d7464bd"}, + {file = "zstandard-0.22.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6c25b8eb733d4e741246151d895dd0308137532737f337411160ff69ca24f93a"}, + {file = 
"zstandard-0.22.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f9b2cde1cd1b2a10246dbc143ba49d942d14fb3d2b4bccf4618d475c65464912"}, + {file = "zstandard-0.22.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a88b7df61a292603e7cd662d92565d915796b094ffb3d206579aaebac6b85d5f"}, + {file = "zstandard-0.22.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:466e6ad8caefb589ed281c076deb6f0cd330e8bc13c5035854ffb9c2014b118c"}, + {file = "zstandard-0.22.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a1d67d0d53d2a138f9e29d8acdabe11310c185e36f0a848efa104d4e40b808e4"}, + {file = "zstandard-0.22.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:39b2853efc9403927f9065cc48c9980649462acbdf81cd4f0cb773af2fd734bc"}, + {file = "zstandard-0.22.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8a1b2effa96a5f019e72874969394edd393e2fbd6414a8208fea363a22803b45"}, + {file = "zstandard-0.22.0-cp312-cp312-win32.whl", hash = "sha256:88c5b4b47a8a138338a07fc94e2ba3b1535f69247670abfe422de4e0b344aae2"}, + {file = "zstandard-0.22.0-cp312-cp312-win_amd64.whl", hash = "sha256:de20a212ef3d00d609d0b22eb7cc798d5a69035e81839f549b538eff4105d01c"}, + {file = "zstandard-0.22.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d75f693bb4e92c335e0645e8845e553cd09dc91616412d1d4650da835b5449df"}, + {file = "zstandard-0.22.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:36a47636c3de227cd765e25a21dc5dace00539b82ddd99ee36abae38178eff9e"}, + {file = "zstandard-0.22.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68953dc84b244b053c0d5f137a21ae8287ecf51b20872eccf8eaac0302d3e3b0"}, + {file = "zstandard-0.22.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2612e9bb4977381184bb2463150336d0f7e014d6bb5d4a370f9a372d21916f69"}, + {file = "zstandard-0.22.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:23d2b3c2b8e7e5a6cb7922f7c27d73a9a615f0a5ab5d0e03dd533c477de23004"}, + {file = "zstandard-0.22.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1d43501f5f31e22baf822720d82b5547f8a08f5386a883b32584a185675c8fbf"}, + {file = "zstandard-0.22.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a493d470183ee620a3df1e6e55b3e4de8143c0ba1b16f3ded83208ea8ddfd91d"}, + {file = "zstandard-0.22.0-cp38-cp38-win32.whl", hash = "sha256:7034d381789f45576ec3f1fa0e15d741828146439228dc3f7c59856c5bcd3292"}, + {file = "zstandard-0.22.0-cp38-cp38-win_amd64.whl", hash = "sha256:d8fff0f0c1d8bc5d866762ae95bd99d53282337af1be9dc0d88506b340e74b73"}, + {file = "zstandard-0.22.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2fdd53b806786bd6112d97c1f1e7841e5e4daa06810ab4b284026a1a0e484c0b"}, + {file = "zstandard-0.22.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:73a1d6bd01961e9fd447162e137ed949c01bdb830dfca487c4a14e9742dccc93"}, + {file = "zstandard-0.22.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9501f36fac6b875c124243a379267d879262480bf85b1dbda61f5ad4d01b75a3"}, + {file = "zstandard-0.22.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48f260e4c7294ef275744210a4010f116048e0c95857befb7462e033f09442fe"}, + {file = "zstandard-0.22.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:959665072bd60f45c5b6b5d711f15bdefc9849dd5da9fb6c873e35f5d34d8cfb"}, + {file = 
"zstandard-0.22.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d22fdef58976457c65e2796e6730a3ea4a254f3ba83777ecfc8592ff8d77d303"}, + {file = "zstandard-0.22.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a7ccf5825fd71d4542c8ab28d4d482aace885f5ebe4b40faaa290eed8e095a4c"}, + {file = "zstandard-0.22.0-cp39-cp39-win32.whl", hash = "sha256:f058a77ef0ece4e210bb0450e68408d4223f728b109764676e1a13537d056bb0"}, + {file = "zstandard-0.22.0-cp39-cp39-win_amd64.whl", hash = "sha256:e9e9d4e2e336c529d4c435baad846a181e39a982f823f7e4495ec0b0ec8538d2"}, + {file = "zstandard-0.22.0.tar.gz", hash = "sha256:8226a33c542bcb54cd6bd0a366067b610b41713b64c9abec1bc4533d69f51e70"}, +] + +[package.dependencies] +cffi = {version = ">=1.11", markers = "platform_python_implementation == \"PyPy\""} + +[package.extras] +cffi = ["cffi (>=1.11)"] + [extras] athena = ["botocore", "pyarrow", "pyathena", "s3fs"] az = ["adlfs"] bigquery = ["gcsfs", "google-cloud-bigquery", "grpcio", "pyarrow"] cli = ["cron-descriptor", "pipdeptree"] +clickhouse = ["adlfs", "clickhouse-connect", "clickhouse-driver", "gcsfs", "pyarrow", "s3fs"] databricks = ["databricks-sql-connector"] dbt = ["dbt-athena-community", "dbt-bigquery", "dbt-core", "dbt-databricks", "dbt-duckdb", "dbt-redshift", "dbt-snowflake"] dremio = ["pyarrow"] @@ -8854,4 +9264,4 @@ weaviate = ["weaviate-client"] [metadata] lock-version = "2.0" python-versions = ">=3.8.1,<3.13" -content-hash = "1a0cf7f14a3c3846981cb5fdddd2a17b8f9ba2024ad6f6f8f3413dc40497122e" +content-hash = "f2397d9315e4a76a1abd82102bc527d18da77427f01d2216e6df12168294a5ec" diff --git a/tests/load/utils.py b/tests/load/utils.py index 9ebdb0f8fa..aba3fc9b15 100644 --- a/tests/load/utils.py +++ b/tests/load/utils.py @@ -207,17 +207,6 @@ def destinations_configs( file_format="jsonl", ) ] - destination_configs += [ - DestinationTestConfiguration( - destination="clickhouse", - file_format="parquet", - ) - ] - destination_configs += [ - DestinationTestConfiguration( - destination="clickhouse", file_format="parquet", disable_compression=True - ) - ] destination_configs += [ DestinationTestConfiguration( destination="databricks", @@ -348,6 +337,29 @@ def destinations_configs( destination="clickhouse", staging="filesystem", file_format="parquet", + bucket_url=AWS_BUCKET, + extra_info="s3-authorization", + disable_compression=True + ), + DestinationTestConfiguration( + destination="clickhouse", + staging="filesystem", + file_format="parquet", + bucket_url=AZ_BUCKET, + extra_info="az-authorization", + ), + DestinationTestConfiguration( + destination="clickhouse", + staging="filesystem", + file_format="parquet", + bucket_url=AZ_BUCKET, + extra_info="az-authorization", + disable_compression=True + ), + DestinationTestConfiguration( + destination="clickhouse", + staging="filesystem", + file_format="jsonl", bucket_url=AZ_BUCKET, extra_info="az-authorization", ), @@ -357,6 +369,7 @@ def destinations_configs( file_format="jsonl", bucket_url=AZ_BUCKET, extra_info="az-authorization", + disable_compression=True ), DestinationTestConfiguration( destination="clickhouse", From 7c0ac80f0bcc07aaf4c008294a623a397d9d36fa Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Tue, 9 Apr 2024 11:27:50 +0200 Subject: [PATCH 072/127] Fix table-name separator config resolution #1055 Signed-off-by: Marcel Coetzee --- dlt/destinations/impl/clickhouse/clickhouse.py | 5 ++--- dlt/destinations/impl/clickhouse/configuration.py | 5 ++++- dlt/destinations/impl/clickhouse/sql_client.py | 7 ++++--- 3 files changed, 10 insertions(+), 7 
deletions(-) diff --git a/dlt/destinations/impl/clickhouse/clickhouse.py b/dlt/destinations/impl/clickhouse/clickhouse.py index b534b5f504..307bbaaf46 100644 --- a/dlt/destinations/impl/clickhouse/clickhouse.py +++ b/dlt/destinations/impl/clickhouse/clickhouse.py @@ -22,7 +22,8 @@ TLoadJobState, FollowupJob, LoadJob, - NewLoadJob, StorageSchemaInfo, + NewLoadJob, + StorageSchemaInfo, ) from dlt.common.schema import Schema, TColumnSchema from dlt.common.schema.typing import ( @@ -386,11 +387,9 @@ def get_storage_table(self, table_name: str) -> Tuple[bool, TTableSchemaColumns] schema_table[c[0]] = schema_c # type: ignore return True, schema_table - def get_stored_schema(self) -> StorageSchemaInfo: return super().get_stored_schema() - @staticmethod def _gen_not_null(v: bool) -> str: # ClickHouse fields are not nullable by default. diff --git a/dlt/destinations/impl/clickhouse/configuration.py b/dlt/destinations/impl/clickhouse/configuration.py index 88b01d1cc8..562e0ca2e4 100644 --- a/dlt/destinations/impl/clickhouse/configuration.py +++ b/dlt/destinations/impl/clickhouse/configuration.py @@ -34,6 +34,8 @@ class ClickHouseCredentials(ConnectionStringCredentials): """Timeout for establishing connection. Defaults to 10 seconds.""" send_receive_timeout: int = 300 """Timeout for sending and receiving data. Defaults to 300 seconds.""" + dataset_table_separator: str = "___" + """Separator for dataset table names, defaults to '___', i.e. 'database.dataset___table'.""" __config_gen_annotations__: ClassVar[List[str]] = [ "host", @@ -44,6 +46,7 @@ class ClickHouseCredentials(ConnectionStringCredentials): "secure", "connect_timeout", "send_receive_timeout", + "dataset_table_separator", ] def parse_native_representation(self, native_value: Any) -> None: @@ -72,7 +75,7 @@ def to_url(self) -> URL: class ClickHouseClientConfiguration(DestinationClientDwhWithStagingConfiguration): destination_type: Final[str] = "clickhouse" # type: ignore[misc] credentials: ClickHouseCredentials # type: ignore - dataset_name: Final[str] = "" # type: ignore + dataset_name: Final[str] = "dlt" # type: ignore """dataset name in the destination to load data to, for schemas that are not default schema, it is used as dataset prefix""" # Primary key columns are used to build a sparse primary index which allows for efficient data retrieval, diff --git a/dlt/destinations/impl/clickhouse/sql_client.py b/dlt/destinations/impl/clickhouse/sql_client.py index 0af8933ae7..fd3c9a401b 100644 --- a/dlt/destinations/impl/clickhouse/sql_client.py +++ b/dlt/destinations/impl/clickhouse/sql_client.py @@ -53,6 +53,9 @@ def __init__(self, dataset_name: str, credentials: ClickHouseCredentials) -> Non self.credentials = credentials self.database_name = credentials.database + def has_dataset(self) -> bool: + return super().has_dataset() + def open_connection(self) -> clickhouse_driver.dbapi.connection.Connection: self._conn = clickhouse_driver.dbapi.connect( dsn=self.credentials.to_native_representation() @@ -157,9 +160,7 @@ def fully_qualified_dataset_name(self, escape: bool = True) -> str: return f"{database_name}.{dataset_name}" def make_qualified_table_name(self, table_name: str, escape: bool = True) -> str: - dataset_table_separator = dlt.config[ - "destination.clickhouse.credentials.dataset_table_separator" - ] + dataset_table_separator = self.credentials.dataset_table_separator if escape: database_name = self.capabilities.escape_identifier(self.database_name) dataset_and_table = self.capabilities.escape_identifier( From 
411ff77477de67b7b4509dcd0456271b2443bbb9 Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Tue, 9 Apr 2024 11:28:03 +0200 Subject: [PATCH 073/127] Format Signed-off-by: Marcel Coetzee --- dlt/destinations/impl/bigquery/bigquery.py | 4 +++- tests/load/utils.py | 6 +++--- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/dlt/destinations/impl/bigquery/bigquery.py b/dlt/destinations/impl/bigquery/bigquery.py index 03c42d5b0c..86448bd011 100644 --- a/dlt/destinations/impl/bigquery/bigquery.py +++ b/dlt/destinations/impl/bigquery/bigquery.py @@ -363,7 +363,9 @@ def prepare_load_table( def _get_column_def_sql(self, column: TColumnSchema, table_format: TTableFormat = None) -> str: name = self.capabilities.escape_identifier(column["name"]) - column_def_sql = f"{name} {self.type_mapper.to_db_type(column, table_format)} {self._gen_not_null(column.get('nullable', True))}" + column_def_sql = ( + f"{name} {self.type_mapper.to_db_type(column, table_format)} {self._gen_not_null(column.get('nullable', True))}" + ) if column.get(ROUND_HALF_EVEN_HINT, False): column_def_sql += " OPTIONS (rounding_mode='ROUND_HALF_EVEN')" if column.get(ROUND_HALF_AWAY_FROM_ZERO_HINT, False): diff --git a/tests/load/utils.py b/tests/load/utils.py index aba3fc9b15..1811c1edf2 100644 --- a/tests/load/utils.py +++ b/tests/load/utils.py @@ -339,7 +339,7 @@ def destinations_configs( file_format="parquet", bucket_url=AWS_BUCKET, extra_info="s3-authorization", - disable_compression=True + disable_compression=True, ), DestinationTestConfiguration( destination="clickhouse", @@ -354,7 +354,7 @@ def destinations_configs( file_format="parquet", bucket_url=AZ_BUCKET, extra_info="az-authorization", - disable_compression=True + disable_compression=True, ), DestinationTestConfiguration( destination="clickhouse", @@ -369,7 +369,7 @@ def destinations_configs( file_format="jsonl", bucket_url=AZ_BUCKET, extra_info="az-authorization", - disable_compression=True + disable_compression=True, ), DestinationTestConfiguration( destination="clickhouse", From e30ce6dd9ceae06c8cf8ea60ca4c3350b783abf5 Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Tue, 9 Apr 2024 23:25:16 +0200 Subject: [PATCH 074/127] Set compression parameter for local #1055 Signed-off-by: Marcel Coetzee --- dlt/destinations/impl/clickhouse/clickhouse.py | 2 +- dlt/destinations/impl/clickhouse/configuration.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/dlt/destinations/impl/clickhouse/clickhouse.py b/dlt/destinations/impl/clickhouse/clickhouse.py index 307bbaaf46..6cf3d9b49f 100644 --- a/dlt/destinations/impl/clickhouse/clickhouse.py +++ b/dlt/destinations/impl/clickhouse/clickhouse.py @@ -249,7 +249,7 @@ def __init__( "allow_experimental_lightweight_delete": 1, "allow_experimental_object_type": 1, }, - # compression=None if compression == "none" else compression, + compression=None if compression == "none" else compression, ) except clickhouse_connect.driver.exceptions.Error as e: raise LoadJobTerminalException( diff --git a/dlt/destinations/impl/clickhouse/configuration.py b/dlt/destinations/impl/clickhouse/configuration.py index 562e0ca2e4..075150538d 100644 --- a/dlt/destinations/impl/clickhouse/configuration.py +++ b/dlt/destinations/impl/clickhouse/configuration.py @@ -75,7 +75,7 @@ def to_url(self) -> URL: class ClickHouseClientConfiguration(DestinationClientDwhWithStagingConfiguration): destination_type: Final[str] = "clickhouse" # type: ignore[misc] credentials: ClickHouseCredentials # type: ignore - dataset_name: Final[str] = "dlt" # 
type: ignore + dataset_name: Final[str] = "" # type: ignore """dataset name in the destination to load data to, for schemas that are not default schema, it is used as dataset prefix""" # Primary key columns are used to build a sparse primary index which allows for efficient data retrieval, From 3c89b3bbc8284d2cbd794b8dc24d0de0bfd2f32f Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Wed, 10 Apr 2024 00:18:26 +0200 Subject: [PATCH 075/127] Set compression method to 'auto' for s3 table function #1055 Signed-off-by: Marcel Coetzee --- dlt/destinations/impl/clickhouse/clickhouse.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/dlt/destinations/impl/clickhouse/clickhouse.py b/dlt/destinations/impl/clickhouse/clickhouse.py index 6cf3d9b49f..c4e7b9c02e 100644 --- a/dlt/destinations/impl/clickhouse/clickhouse.py +++ b/dlt/destinations/impl/clickhouse/clickhouse.py @@ -194,10 +194,11 @@ def __init__( secret_access_key = None clickhouse_format = FILE_FORMAT_TO_TABLE_FUNCTION_MAPPING[file_extension] + structure = "auto" template = Template(""" SELECT * FROM s3('{{ url }}'{% if access_key_id and secret_access_key %}, - '{{ access_key_id }}','{{ secret_access_key }}'{% else %},NOSIGN{% endif %},'{{ clickhouse_format }}') + '{{ access_key_id }}','{{ secret_access_key }}'{% else %},NOSIGN{% endif %},'{{ clickhouse_format }}','{{ structure }}','{{ compression }}') """) table_function = template.render( @@ -205,6 +206,8 @@ def __init__( access_key_id=access_key_id, secret_access_key=secret_access_key, clickhouse_format=clickhouse_format, + structure=structure, + compression=compression, ).strip() statement = f"INSERT INTO {qualified_table_name} {table_function}" From df7fcf26f6e7ec1f6bd92b2dadae6c7579e76c4f Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Wed, 10 Apr 2024 00:27:51 +0200 Subject: [PATCH 076/127] Typing Signed-off-by: Marcel Coetzee --- dlt/destinations/impl/clickhouse/factory.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/dlt/destinations/impl/clickhouse/factory.py b/dlt/destinations/impl/clickhouse/factory.py index 6f2de84db7..3dd13f270b 100644 --- a/dlt/destinations/impl/clickhouse/factory.py +++ b/dlt/destinations/impl/clickhouse/factory.py @@ -1,7 +1,5 @@ import typing as t -from clickhouse_driver.dbapi import Connection # type: ignore[import-untyped] - from dlt.common.destination import Destination, DestinationCapabilitiesContext from dlt.destinations.impl.clickhouse import capabilities from dlt.destinations.impl.clickhouse.configuration import ( @@ -12,9 +10,9 @@ if t.TYPE_CHECKING: from dlt.destinations.impl.clickhouse.clickhouse import ClickHouseClient + from clickhouse_driver.dbapi import Connection # type: ignore[import-untyped] -# noinspection PyPep8Naming class clickhouse(Destination[ClickHouseClientConfiguration, "ClickHouseClient"]): spec = ClickHouseClientConfiguration From 743cc05fd2be06976322b7253f4fed8e27fd28eb Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Wed, 10 Apr 2024 00:32:27 +0200 Subject: [PATCH 077/127] Typing Signed-off-by: Marcel Coetzee --- dlt/destinations/impl/clickhouse/factory.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/dlt/destinations/impl/clickhouse/factory.py b/dlt/destinations/impl/clickhouse/factory.py index 3dd13f270b..e5b8fc0e6a 100644 --- a/dlt/destinations/impl/clickhouse/factory.py +++ b/dlt/destinations/impl/clickhouse/factory.py @@ -27,7 +27,9 @@ def client_class(self) -> t.Type["ClickHouseClient"]: def __init__( self, - credentials: 
t.Union[ClickHouseCredentials, str, t.Dict[str, t.Any], Connection] = None, + credentials: t.Union[ + ClickHouseCredentials, str, t.Dict[str, t.Any], t.Type["Connection"] + ] = None, destination_name: str = None, environment: str = None, **kwargs: t.Any, From 3f203812b85bfc54eca8f1bb7525d1cb7b037a88 Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Wed, 10 Apr 2024 21:13:32 +0200 Subject: [PATCH 078/127] Initial draft doc Signed-off-by: Marcel Coetzee --- .../dlt-ecosystem/destinations/clickhouse.md | 142 ++++++++++++++++++ 1 file changed, 142 insertions(+) create mode 100644 docs/website/docs/dlt-ecosystem/destinations/clickhouse.md diff --git a/docs/website/docs/dlt-ecosystem/destinations/clickhouse.md b/docs/website/docs/dlt-ecosystem/destinations/clickhouse.md new file mode 100644 index 0000000000..f806a449e8 --- /dev/null +++ b/docs/website/docs/dlt-ecosystem/destinations/clickhouse.md @@ -0,0 +1,142 @@ +--- +title: ClickHouse +description: ClickHouse `dlt` destination +keywords: [ clickhouse, destination, data warehouse ] +--- + +# ClickHouse + +## Install dlt with ClickHouse + +**To install the DLT library with ClickHouse dependencies:** + +```sh +pip install dlt[clickhouse] +``` + +## Setup Guide + +### 1. Initialize the dlt project + +Let's start by initializing a new `dlt` project as follows: + +```sh +dlt init chess clickhouse +``` + +> 💡 This command will initialize your pipeline with chess as the source and ClickHouse as the destination. + +The above command generates several files and directories, including `.dlt/secrets.toml` and a requirements file for ClickHouse. You can install the necessary dependencies specified in the +requirements file by executing it as follows: + +```sh +pip install -r requirements.txt +``` + +or with `pip install dlt[clickhouse]`, which installs the `dlt` library and the necessary dependencies for working with ClickHouse as a destination. + +### 2. Setup ClickHouse database + +To load data into ClickHouse, you need to create a ClickHouse database. While we recommend asking our GPT-4 assistant for details, we have provided a general outline of the process below: + +1. You can use an existing ClickHouse database or create a new one. + +2. To create a new database, connect to your ClickHouse server using the `clickhouse-client` command line tool or a SQL client of your choice. + +3. Run the following SQL command to create a new database: + + ```sql + CREATE DATABASE IF NOT EXISTS dlt_data; + ``` + +### 3. Add credentials + +1. Next, set up the ClickHouse credentials in the `.dlt/secrets.toml` file as shown below: + + ```toml + [destination.clickhouse.credentials] + database = "dlt_data" # the database name you created + username = "default" # ClickHouse username, default is usually "default" + password = "" # ClickHouse password if any + host = "localhost" # ClickHouse server host + port = 9000 # ClickHouse HTTP port, default is 9000 + secure = false # set to true if using HTTPS + ``` + +2. You can pass a database connection string similar to the one used by the `clickhouse-driver` library. The credentials above will look like this: + + ```toml + destination.clickhouse.credentials="clickhouse://default:password@localhost/dlt_data?secure=false" + ``` + +## Write disposition + +All [write dispositions](../../general-usage/incremental-loading#choosing-a-write-disposition) are supported. 
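For orientation, here is a minimal sketch of a pipeline run against the setup above (the resource data, table name, and dataset name are illustrative; the credentials are assumed to be in `.dlt/secrets.toml` as shown):

```py
import dlt

# Assumes the ClickHouse credentials shown above are configured in .dlt/secrets.toml.
pipeline = dlt.pipeline(
    pipeline_name="chess_pipeline",
    destination="clickhouse",
    dataset_name="chess_data",
)

# Any iterable of dicts (or a dlt source/resource) can be loaded this way.
load_info = pipeline.run(
    [{"player_id": 1, "username": "magnus"}, {"player_id": 2, "username": "hikaru"}],
    table_name="players",
)
print(load_info)
```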
+ +## Data loading + +Data is loaded into ClickHouse using the most efficient method depending on the data source: + +- For local files, the `clickhouse-connect` library is used to directly load files into ClickHouse tables using the `INSERT` command. + +- For files in remote storage like S3, Google Cloud Storage, or Azure Blob Storage, ClickHouse table functions like `s3`, `gcs` and `azureBlobStorage` are used to read the files and insert the data + into tables. + +## Supported file formats + +- [jsonl](../file-formats/jsonl.md) is the preferred format for both direct loading and staging. +- [parquet](../file-formats/parquet.md) is also supported for both direct loading and staging. + +## Supported column hints + +ClickHouse supports the following [column hints](https://dlthub.com/docs/general-usage/schema#tables-and-columns): + +- `primary_key` - marks the column as part of the primary key. Multiple columns can have this hint to create a composite primary key. + +## Table Engine + +By default, tables are created using the `ReplicatedMergeTree` table engine in ClickHouse. You can specify an alternate table engine using the `table_engine_type` hint on the resource: + +```py +@dlt.resource(table_engine_type="merge_tree") +def my_resource(): + ... +``` + +Supported values are: + +- `merge_tree` - creates tables using the `MergeTree` engine +- `replicated_merge_tree` (default) - creates tables using the `ReplicatedMergeTree` engine + +## Staging support + +ClickHouse supports Amazon S3, Google Cloud Storage and Azure Blob Storage as file staging destinations. + +`dlt` will upload Parquet or JSONL files to the staging location and use ClickHouse table functions to load the data directly from the staged files. + +Please refer to the filesystem documentation to learn how to configure credentials for the staging destinations: + +- [Amazon S3](./filesystem.md#aws-s3) +- [Google Cloud Storage](./filesystem.md#google-storage) +- [Azure Blob Storage](./filesystem.md#azure-blob-storage) + +To run a pipeline with staging enabled: + +```py +pipeline = dlt.pipeline( + pipeline_name='chess_pipeline', + destination='clickhouse', + staging='filesystem', # add this to activate staging + dataset_name='chess_data' +) +``` + +### dbt support + +Integration with [dbt](../transformations/dbt/dbt.md) is currently not supported. + +### Syncing of `dlt` state + +This destination fully supports [dlt state sync](../../general-usage/state#syncing-state-with-destination). 
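As a quick sanity check after a load, you can read data back through the pipeline's SQL client. A sketch, assuming a pipeline like the ones above has already run (the table name is illustrative):

```py
# Counts the rows dlt loaded into one of the destination tables.
with pipeline.sql_client() as client:
    with client.execute_query("SELECT count(*) FROM players_profiles") as cursor:
        print(cursor.fetchall())
```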
+ + \ No newline at end of file From 463ca1dfee849549956b8dcd3d0ba3b782289e59 Mon Sep 17 00:00:00 2001 From: Marcin Rudolf Date: Wed, 10 Apr 2024 23:57:01 +0200 Subject: [PATCH 079/127] auto compression for parquet, detects compression of local files --- dlt/destinations/impl/clickhouse/clickhouse.py | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/dlt/destinations/impl/clickhouse/clickhouse.py b/dlt/destinations/impl/clickhouse/clickhouse.py index c4e7b9c02e..259c0b12d2 100644 --- a/dlt/destinations/impl/clickhouse/clickhouse.py +++ b/dlt/destinations/impl/clickhouse/clickhouse.py @@ -174,7 +174,12 @@ def __init__( file_extension = cast(SUPPORTED_FILE_FORMATS, file_extension) clickhouse_format: str = FILE_FORMAT_TO_TABLE_FUNCTION_MAPPING[file_extension] - compression = "none" if config.get("data_writer.disable_compression") else "gz" + if file_extension == "parquet": + # Auto works for parquet + compression = "auto" + else: + # It does not work for json + compression = "none" if config.get("data_writer.disable_compression") else "gz" statement: str = "" @@ -193,7 +198,6 @@ def __init__( access_key_id = None secret_access_key = None - clickhouse_format = FILE_FORMAT_TO_TABLE_FUNCTION_MAPPING[file_extension] structure = "auto" template = Template(""" @@ -234,6 +238,10 @@ def __init__( statement = f"INSERT INTO {qualified_table_name} {table_function}" elif not bucket_path: # Local filesystem. + if file_extension == "parquet": + compression = "auto" + else: + compression = "gz" if FileStorage.is_gzipped(file_path) else "none" try: with clickhouse_connect.create_client( host=client.credentials.host, @@ -252,7 +260,7 @@ def __init__( "allow_experimental_lightweight_delete": 1, "allow_experimental_object_type": 1, }, - compression=None if compression == "none" else compression, + compression=compression, ) except clickhouse_connect.driver.exceptions.Error as e: raise LoadJobTerminalException( From e7e5925be85f74b48f6ad31a2e4b095a560708d7 Mon Sep 17 00:00:00 2001 From: Marcin Rudolf Date: Wed, 10 Apr 2024 23:57:59 +0200 Subject: [PATCH 080/127] fixes has_dataset, recognizes more exceptions --- .../impl/clickhouse/sql_client.py | 36 +++++++++++-------- 1 file changed, 21 insertions(+), 15 deletions(-) diff --git a/dlt/destinations/impl/clickhouse/sql_client.py b/dlt/destinations/impl/clickhouse/sql_client.py index fd3c9a401b..5710e41e49 100644 --- a/dlt/destinations/impl/clickhouse/sql_client.py +++ b/dlt/destinations/impl/clickhouse/sql_client.py @@ -3,6 +3,7 @@ Iterator, AnyStr, Any, + List, Optional, Sequence, ClassVar, @@ -44,7 +45,7 @@ class ClickHouseDBApiCursorImpl(DBApiCursorImpl): class ClickHouseSqlClient( SqlClientBase[clickhouse_driver.dbapi.connection.Connection], DBTransaction ): - dbapi: ClassVar[DBApi] = clickhouse_driver.dbapi.connection.Connection + dbapi: ClassVar[DBApi] = clickhouse_driver.dbapi capabilities: ClassVar[DestinationCapabilitiesContext] = capabilities() def __init__(self, dataset_name: str, credentials: ClickHouseCredentials) -> None: @@ -54,7 +55,7 @@ def __init__(self, dataset_name: str, credentials: ClickHouseCredentials) -> Non self.database_name = credentials.database def has_dataset(self) -> bool: - return super().has_dataset() + return len(self._list_tables()) > 0 def open_connection(self) -> clickhouse_driver.dbapi.connection.Connection: self._conn = clickhouse_driver.dbapi.connect( @@ -98,7 +99,17 @@ def create_dataset(self) -> None: def drop_dataset(self) -> None: # Since ClickHouse doesn't have schemas, we need to drop 
all tables in our virtual schema, # or collection of tables, that has the `dataset_name` as a prefix. - to_drop_results = self.execute_sql( + to_drop_results = self._list_tables() + for table in to_drop_results: + # The "DROP TABLE" clause is discarded if we allow clickhouse_driver to handle parameter substitution. + # This is because the driver incorrectly substitutes the entire query string, causing the "DROP TABLE" keyword to be omitted. + # To resolve this, we are forced to provide the full query string here. + self.execute_sql( + f"""DROP TABLE {self.capabilities.escape_identifier(self.database_name)}.{self.capabilities.escape_identifier(table)} SYNC""" + ) + + def _list_tables(self) -> List[str]: + rows = self.execute_sql( """ SELECT name FROM system.tables @@ -110,14 +121,7 @@ def drop_dataset(self) -> None: f"{self.dataset_name}%", ), ) - for to_drop_result in to_drop_results: - table = to_drop_result[0] - # The "DROP TABLE" clause is discarded if we allow clickhouse_driver to handle parameter substitution. - # This is because the driver incorrectly substitutes the entire query string, causing the "DROP TABLE" keyword to be omitted. - # To resolve this, we are forced to provide the full query string here. - self.execute_sql( - f"""DROP TABLE {self.capabilities.escape_identifier(self.database_name)}.{self.capabilities.escape_identifier(table)} SYNC""" - ) + return [row[0] for row in rows] @contextmanager @raise_database_error @@ -172,12 +176,14 @@ def make_qualified_table_name(self, table_name: str, escape: bool = True) -> str return f"{database_name}.{dataset_and_table}" @classmethod - def _make_database_exception(cls, ex: Exception) -> Exception: # type: ignore[return] + def _make_database_exception(cls, ex: Exception) -> Exception: if isinstance(ex, clickhouse_driver.dbapi.errors.OperationalError): - if "Code: 57." in str(ex) or "Code: 82." in str(ex): - raise DatabaseTerminalException(ex) + if "Code: 57." in str(ex) or "Code: 82." in str(ex) or "Code: 47." in str(ex): + return DatabaseTerminalException(ex) elif "Code: 60." in str(ex) or "Code: 81." 
in str(ex): - raise DatabaseUndefinedRelation(ex) + return DatabaseUndefinedRelation(ex) + else: + return DatabaseTransientException(ex) elif isinstance( ex, ( From 871aa4a21dfce689764c81c360b061b944defefa Mon Sep 17 00:00:00 2001 From: Marcin Rudolf Date: Wed, 10 Apr 2024 23:58:15 +0200 Subject: [PATCH 081/127] fixes some tests --- tests/load/pipeline/test_arrow_loading.py | 11 +++++++++-- tests/load/test_sql_client.py | 11 +++++++---- 2 files changed, 16 insertions(+), 6 deletions(-) diff --git a/tests/load/pipeline/test_arrow_loading.py b/tests/load/pipeline/test_arrow_loading.py index 82ccb24bf1..294c479e94 100644 --- a/tests/load/pipeline/test_arrow_loading.py +++ b/tests/load/pipeline/test_arrow_loading.py @@ -1,11 +1,11 @@ from datetime import datetime # noqa: I251 -from typing import Any, Union, List, Dict, Tuple, Literal import os import pytest import numpy as np import pyarrow as pa import pandas as pd +import base64 import dlt from dlt.common import pendulum @@ -42,6 +42,7 @@ def test_load_arrow_item( "redshift", "databricks", "synapse", + "clickhouse", ) # athena/redshift can't load TIME columns include_binary = not ( destination_config.destination in ("redshift", "databricks") @@ -102,11 +103,17 @@ def some_data(): row[i] = row[i].tobytes() if destination_config.destination == "redshift": - # Binary columns are hex formatted in results + # Redshift needs hex string for record in records: if "binary" in record: record["binary"] = record["binary"].hex() + if destination_config.destination == "clickhouse": + # Clickhouse needs base64 string + for record in records: + if "binary" in record: + record["binary"] = base64.b64encode(record["binary"]).decode("ascii") + for row in rows: for i in range(len(row)): if isinstance(row[i], datetime): diff --git a/tests/load/test_sql_client.py b/tests/load/test_sql_client.py index bd1ec5ba43..e9ddddcbe0 100644 --- a/tests/load/test_sql_client.py +++ b/tests/load/test_sql_client.py @@ -369,7 +369,7 @@ def test_database_exceptions(client: SqlJobClientBase) -> None: with client.sql_client.execute_query(f"DELETE FROM {qualified_name} WHERE 1=1"): pass assert client.sql_client.is_dbapi_exception(term_ex.value.dbapi_exception) - if client.config.destination_type != "dremio": + if client.config.destination_type not in ["dremio", "clickhouse"]: with pytest.raises(DatabaseUndefinedRelation) as term_ex: with client.sql_client.execute_query("DROP SCHEMA UNKNOWN"): pass @@ -630,18 +630,21 @@ def assert_load_id(sql_client: SqlClientBase[TNativeConn], load_id: str) -> None def prepare_temp_table(client: SqlJobClientBase) -> str: uniq_suffix = uniq_id() table_name = f"tmp_{uniq_suffix}" - iceberg_table_suffix = "" + ddl_suffix = "" coltype = "numeric" if client.config.destination_type == "athena": - iceberg_table_suffix = ( + ddl_suffix = ( f"LOCATION '{AWS_BUCKET}/ci/{table_name}' TBLPROPERTIES ('table_type'='ICEBERG'," " 'format'='parquet');" ) coltype = "bigint" qualified_table_name = table_name + if client.config.destination_type == "clickhouse": + ddl_suffix = "ENGINE = MergeTree() ORDER BY col" + qualified_table_name = client.sql_client.make_qualified_table_name(table_name) else: qualified_table_name = client.sql_client.make_qualified_table_name(table_name) client.sql_client.execute_sql( - f"CREATE TABLE {qualified_table_name} (col {coltype}) {iceberg_table_suffix};" + f"CREATE TABLE {qualified_table_name} (col {coltype}) {ddl_suffix};" ) return table_name From 8ed4919fdcf4c4f2b7ecb5aa3e34daf5174470e9 Mon Sep 17 00:00:00 2001 From: Marcin Rudolf Date: 
Thu, 11 Apr 2024 00:21:17 +0200 Subject: [PATCH 082/127] aligns clickhouse config with dataclasses --- .../impl/clickhouse/configuration.py | 23 +++---------------- .../test_clickhouse_table_builder.py | 2 +- 2 files changed, 4 insertions(+), 21 deletions(-) diff --git a/dlt/destinations/impl/clickhouse/configuration.py b/dlt/destinations/impl/clickhouse/configuration.py index 075150538d..34bac5b43b 100644 --- a/dlt/destinations/impl/clickhouse/configuration.py +++ b/dlt/destinations/impl/clickhouse/configuration.py @@ -73,9 +73,9 @@ def to_url(self) -> URL: @configspec class ClickHouseClientConfiguration(DestinationClientDwhWithStagingConfiguration): - destination_type: Final[str] = "clickhouse" # type: ignore[misc] - credentials: ClickHouseCredentials # type: ignore - dataset_name: Final[str] = "" # type: ignore + destination_type: Final[str] = dataclasses.field(default="clickhouse", init=False, repr=False, compare=False) # type: ignore[misc] + credentials: ClickHouseCredentials = None + dataset_name: Final[str] = dataclasses.field(default="", init=False, repr=False, compare=False) # type: ignore[misc] """dataset name in the destination to load data to, for schemas that are not default schema, it is used as dataset prefix""" # Primary key columns are used to build a sparse primary index which allows for efficient data retrieval, @@ -88,20 +88,3 @@ def fingerprint(self) -> str: if self.credentials and self.credentials.host: return digest128(self.credentials.host) return "" - - if TYPE_CHECKING: - - def __init__( - self, - *, - credentials: ClickHouseCredentials = None, - dataset_name: str = None, - destination_name: str = None, - environment: str = None - ) -> None: - super().__init__( - credentials=credentials, - destination_name=destination_name, - environment=environment, - ) - ... 
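The net effect of this change is that `dataset_name` is declared with `init=False` and can no longer be passed to the configuration's `__init__`; it is bound explicitly instead. A short sketch of the construction pattern the test below switches to (the dataset name is illustrative):

```py
from dlt.destinations.impl.clickhouse.configuration import (
    ClickHouseClientConfiguration,
    ClickHouseCredentials,
)

creds = ClickHouseCredentials()
# dataset_name is bound after construction rather than passed to __init__.
config = ClickHouseClientConfiguration(credentials=creds)._bind_dataset_name("my_dataset")
```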
diff --git a/tests/load/clickhouse/test_clickhouse_table_builder.py b/tests/load/clickhouse/test_clickhouse_table_builder.py index 9d3fadfc47..9db87dc233 100644 --- a/tests/load/clickhouse/test_clickhouse_table_builder.py +++ b/tests/load/clickhouse/test_clickhouse_table_builder.py @@ -20,7 +20,7 @@ def clickhouse_client(empty_schema: Schema) -> ClickHouseClient: creds = ClickHouseCredentials() return ClickHouseClient( empty_schema, - ClickHouseClientConfiguration(dataset_name=f"test_{uniq_id()}", credentials=creds), + ClickHouseClientConfiguration(credentials=creds)._bind_dataset_name(f"test_{uniq_id()}"), ) From dce1967f3d32cc7e52a619cd17d19cba55ab67b8 Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Thu, 11 Apr 2024 20:43:01 +0200 Subject: [PATCH 083/127] Remove empty dataset default #1055 Signed-off-by: Marcel Coetzee --- .../impl/clickhouse/configuration.py | 7 +++--- .../dlt-ecosystem/destinations/clickhouse.md | 22 ++++++++++++------- 2 files changed, 17 insertions(+), 12 deletions(-) diff --git a/dlt/destinations/impl/clickhouse/configuration.py b/dlt/destinations/impl/clickhouse/configuration.py index 34bac5b43b..7d729933c0 100644 --- a/dlt/destinations/impl/clickhouse/configuration.py +++ b/dlt/destinations/impl/clickhouse/configuration.py @@ -41,9 +41,10 @@ class ClickHouseCredentials(ConnectionStringCredentials): "host", "port", "http_port", - "username", - "database", "secure", + "database", + "username", + "password", "connect_timeout", "send_receive_timeout", "dataset_table_separator", @@ -75,8 +76,6 @@ def to_url(self) -> URL: class ClickHouseClientConfiguration(DestinationClientDwhWithStagingConfiguration): destination_type: Final[str] = dataclasses.field(default="clickhouse", init=False, repr=False, compare=False) # type: ignore[misc] credentials: ClickHouseCredentials = None - dataset_name: Final[str] = dataclasses.field(default="", init=False, repr=False, compare=False) # type: ignore[misc] - """dataset name in the destination to load data to, for schemas that are not default schema, it is used as dataset prefix""" # Primary key columns are used to build a sparse primary index which allows for efficient data retrieval, # but they do not enforce uniqueness constraints. It permits duplicate values even for the primary key diff --git a/docs/website/docs/dlt-ecosystem/destinations/clickhouse.md b/docs/website/docs/dlt-ecosystem/destinations/clickhouse.md index f806a449e8..f2bcec33ab 100644 --- a/docs/website/docs/dlt-ecosystem/destinations/clickhouse.md +++ b/docs/website/docs/dlt-ecosystem/destinations/clickhouse.md @@ -43,10 +43,13 @@ To load data into ClickHouse, you need to create a ClickHouse database. While we 2. To create a new database, connect to your ClickHouse server using the `clickhouse-client` command line tool or a SQL client of your choice. -3. Run the following SQL command to create a new database: +3. Run the following SQL commands to create a new database, user and grant the necessary permissions: ```sql - CREATE DATABASE IF NOT EXISTS dlt_data; + CREATE DATABASE IF NOT EXISTS dlt; + CREATE USER dlt IDENTIFIED WITH sha256_password BY 'my_password' + GRANT ALL ON dlt.* TO dlt; + GRANT CREATE TEMPORARY TABLE, S3 ON *.* TO dlt; ``` ### 3. Add credentials @@ -55,17 +58,20 @@ To load data into ClickHouse, you need to create a ClickHouse database. 
While we ```toml [destination.clickhouse.credentials] - database = "dlt_data" # the database name you created - username = "default" # ClickHouse username, default is usually "default" - password = "" # ClickHouse password if any - host = "localhost" # ClickHouse server host - port = 9000 # ClickHouse HTTP port, default is 9000 - secure = false # set to true if using HTTPS + database = "dlt_data" # the database name you created + username = "default" # ClickHouse username, default is usually "default" + password = "" # ClickHouse password if any + host = "localhost" # ClickHouse server host + port = 9000 # ClickHouse HTTP port, default is 9000 + http_port = 8443 # HTTP Port to connect to ClickHouse server's HTTP interface. + secure = 1 # Set to 1 if using HTTPS, else 0. + dataset_table_separator = "___" # Separator for dataset table names, defaults to '___', i.e. 'database.dataset___table'. ``` 2. You can pass a database connection string similar to the one used by the `clickhouse-driver` library. The credentials above will look like this: ```toml + # keep it at the top of your toml file, before any section starts. destination.clickhouse.credentials="clickhouse://default:password@localhost/dlt_data?secure=false" ``` From d1c7cde8b065343b6eb4fb18331e93f58a61bd95 Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Fri, 12 Apr 2024 10:26:19 +0200 Subject: [PATCH 084/127] Update clickhouse configuration and docs sidebar Signed-off-by: Marcel Coetzee --- dlt/destinations/impl/clickhouse/configuration.py | 2 +- docs/website/sidebars.js | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/dlt/destinations/impl/clickhouse/configuration.py b/dlt/destinations/impl/clickhouse/configuration.py index 7d729933c0..d6352a1467 100644 --- a/dlt/destinations/impl/clickhouse/configuration.py +++ b/dlt/destinations/impl/clickhouse/configuration.py @@ -1,5 +1,5 @@ import dataclasses -from typing import ClassVar, List, Any, Final, TYPE_CHECKING, Literal, cast +from typing import ClassVar, List, Any, Final, Literal, cast from dlt.common.configuration import configspec from dlt.common.configuration.specs import ConnectionStringCredentials diff --git a/docs/website/sidebars.js b/docs/website/sidebars.js index 418ac2efd6..4ba8dcac58 100644 --- a/docs/website/sidebars.js +++ b/docs/website/sidebars.js @@ -105,6 +105,7 @@ const sidebars = { 'dlt-ecosystem/destinations/duckdb', 'dlt-ecosystem/destinations/mssql', 'dlt-ecosystem/destinations/synapse', + 'dlt-ecosystem/destinations/clickhouse', 'dlt-ecosystem/destinations/filesystem', 'dlt-ecosystem/destinations/postgres', 'dlt-ecosystem/destinations/redshift', From 66fbc0edfb0621d3f7a1a99f2ee4e28ef8d3bd12 Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Sat, 13 Apr 2024 21:55:32 +0200 Subject: [PATCH 085/127] Clickhouse docs #1055 Signed-off-by: Marcel Coetzee --- .../dlt-ecosystem/destinations/clickhouse.md | 20 +++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/docs/website/docs/dlt-ecosystem/destinations/clickhouse.md b/docs/website/docs/dlt-ecosystem/destinations/clickhouse.md index f2bcec33ab..ed4b8c7516 100644 --- a/docs/website/docs/dlt-ecosystem/destinations/clickhouse.md +++ b/docs/website/docs/dlt-ecosystem/destinations/clickhouse.md @@ -47,8 +47,9 @@ To load data into ClickHouse, you need to create a ClickHouse database. 
While we ```sql CREATE DATABASE IF NOT EXISTS dlt; - CREATE USER dlt IDENTIFIED WITH sha256_password BY 'my_password' - GRANT ALL ON dlt.* TO dlt; + CREATE USER dlt IDENTIFIED WITH sha256_password BY 'Dlt*12345789234567'; + GRANT CREATE, ALTER, SELECT, DELETE, DROP, TRUNCATE, OPTIMIZE, SHOW, INSERT, dictGet ON dlt.* TO dlt; + GRANT SELECT ON INFORMATION_SCHEMA.COLUMNS TO dlt; GRANT CREATE TEMPORARY TABLE, S3 ON *.* TO dlt; ``` @@ -58,21 +59,21 @@ To load data into ClickHouse, you need to create a ClickHouse database. While we ```toml [destination.clickhouse.credentials] - database = "dlt_data" # the database name you created - username = "default" # ClickHouse username, default is usually "default" - password = "" # ClickHouse password if any + database = "dlt" # The database name you created + username = "dlt" # ClickHouse username, default is usually "default" + password = "Dlt*12345789234567" # ClickHouse password if any host = "localhost" # ClickHouse server host port = 9000 # ClickHouse HTTP port, default is 9000 http_port = 8443 # HTTP Port to connect to ClickHouse server's HTTP interface. secure = 1 # Set to 1 if using HTTPS, else 0. - dataset_table_separator = "___" # Separator for dataset table names, defaults to '___', i.e. 'database.dataset___table'. + dataset_table_separator = "___" # Separator for dataset table names from dataset. ``` 2. You can pass a database connection string similar to the one used by the `clickhouse-driver` library. The credentials above will look like this: ```toml # keep it at the top of your toml file, before any section starts. - destination.clickhouse.credentials="clickhouse://default:password@localhost/dlt_data?secure=false" + destination.clickhouse.credentials="clickhouse://dlt:Dlt*12345789234567@localhost:9000/dlt?secure=1" ``` ## Write disposition @@ -84,14 +85,13 @@ All [write dispositions](../../general-usage/incremental-loading#choosing-a-writ Data is loaded into ClickHouse using the most efficient method depending on the data source: - For local files, the `clickhouse-connect` library is used to directly load files into ClickHouse tables using the `INSERT` command. - - For files in remote storage like S3, Google Cloud Storage, or Azure Blob Storage, ClickHouse table functions like `s3`, `gcs` and `azureBlobStorage` are used to read the files and insert the data into tables. ## Supported file formats - [jsonl](../file-formats/jsonl.md) is the preferred format for both direct loading and staging. -- [parquet](../file-formats/parquet.md) is also supported for both direct loading and staging. +- [parquet](../file-formats/parquet.md) is supported for both direct loading and staging. ## Supported column hints @@ -139,7 +139,7 @@ pipeline = dlt.pipeline( ### dbt support -Integration with [dbt](../transformations/dbt/dbt.md) is currently not supported. +Integration with [dbt](../transformations/dbt/dbt.md) is supported. 
### Syncing of `dlt` state From 077455d90a83fe276848916791c3ff06d850d369 Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Tue, 16 Apr 2024 00:58:14 +0200 Subject: [PATCH 086/127] Don't use Jinja #1055 Signed-off-by: Marcel Coetzee --- .../impl/clickhouse/clickhouse.py | 23 ++++++++----------- 1 file changed, 9 insertions(+), 14 deletions(-) diff --git a/dlt/destinations/impl/clickhouse/clickhouse.py b/dlt/destinations/impl/clickhouse/clickhouse.py index 259c0b12d2..3489c5cacb 100644 --- a/dlt/destinations/impl/clickhouse/clickhouse.py +++ b/dlt/destinations/impl/clickhouse/clickhouse.py @@ -6,7 +6,6 @@ import clickhouse_connect from clickhouse_connect.driver.tools import insert_file -from jinja2 import Template import dlt from dlt import config @@ -200,19 +199,15 @@ def __init__( structure = "auto" - template = Template(""" - SELECT * FROM s3('{{ url }}'{% if access_key_id and secret_access_key %}, - '{{ access_key_id }}','{{ secret_access_key }}'{% else %},NOSIGN{% endif %},'{{ clickhouse_format }}','{{ structure }}','{{ compression }}') - """) - - table_function = template.render( - url=bucket_http_url, - access_key_id=access_key_id, - secret_access_key=secret_access_key, - clickhouse_format=clickhouse_format, - structure=structure, - compression=compression, - ).strip() + table_function = f"SELECT * FROM s3('{bucket_http_url}'" + + if access_key_id and secret_access_key: + table_function += f",'{access_key_id}','{secret_access_key}'" + else: + table_function += ",NOSIGN" + + table_function += f",'{clickhouse_format}','{structure}','{compression}')" + statement = f"INSERT INTO {qualified_table_name} {table_function}" elif bucket_scheme in ("az", "abfs"): From 65bf25004dcbeb14e55f6d3fe2218c392f9cc2e9 Mon Sep 17 00:00:00 2001 From: Dave Date: Tue, 16 Apr 2024 16:05:28 +0200 Subject: [PATCH 087/127] udpate clickhouse workflow file --- .../workflows/test_destination_clickhouse.yml | 50 ++++++------------- 1 file changed, 16 insertions(+), 34 deletions(-) diff --git a/.github/workflows/test_destination_clickhouse.yml b/.github/workflows/test_destination_clickhouse.yml index 57cb9f953f..002d427b3f 100644 --- a/.github/workflows/test_destination_clickhouse.yml +++ b/.github/workflows/test_destination_clickhouse.yml @@ -1,4 +1,5 @@ -name: test clickhouse + +name: test | clickhouse on: pull_request: @@ -6,14 +7,14 @@ on: - master - devel workflow_dispatch: + schedule: + - cron: '0 2 * * *' concurrency: group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} cancel-in-progress: true env: - DLT_SECRETS_TOML: ${{ secrets.DLT_SECRETS_TOML }} - RUNTIME__SENTRY_DSN: https://6f6f7b6f8e0f458a89be4187603b55fe@o1061158.ingest.sentry.io/4504819859914752 RUNTIME__LOG_LEVEL: ERROR @@ -22,22 +23,18 @@ env: jobs: get_docs_changes: + name: docs changes uses: ./.github/workflows/get_docs_changes.yml - if: ${{ !github.event.pull_request.head.repo.fork }} + if: ${{ !github.event.pull_request.head.repo.fork || contains(github.event.pull_request.labels.*.name, 'ci from fork')}} run_loader: - name: Tests Clickhouse loader + name: test | clickhouse tests needs: get_docs_changes if: needs.get_docs_changes.outputs.changes_outside_docs == 'true' - strategy: - fail-fast: false - matrix: - os: [ "ubuntu-latest" ] - # os: ["ubuntu-latest", "macos-latest", "windows-latest"] defaults: run: shell: bash - runs-on: ${{ matrix.os }} + runs-on: "ubuntu-latest" steps: @@ -59,34 +56,19 @@ jobs: - name: Load cached venv id: cached-poetry-dependencies uses: actions/cache@v3 - with: # path: ${{ 
steps.pip-cache.outputs.dir }} + with: path: .venv key: venv-${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-${{ hashFiles('**/poetry.lock') }}-gcp - name: Install dependencies - # if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true' run: poetry install --no-interaction -E clickhouse --with providers -E parquet --with sentry-sdk --with pipeline - - name: create secrets.toml - run: pwd && echo "$DLT_SECRETS_TOML" > tests/.dlt/secrets.toml - - run: | - poetry run pytest tests/helpers/providers tests/load - if: runner.os != 'Windows' - name: Run tests Linux/MAC - - run: | - poetry run pytest tests/helpers/providers tests/load - if: runner.os == 'Windows' - name: Run tests Windows - shell: cmd + poetry run pytest tests/load -m "essential" + name: Run essential tests Linux + if: ${{ ! (contains(github.event.pull_request.labels.*.name, 'ci full') || github.event_name == 'schedule')}} - matrix_job_required_check: - name: Clickhouse loader tests - needs: run_loader - runs-on: ubuntu-latest - if: always() - steps: - - name: Check matrix job results - if: contains(needs.*.result, 'failure') || contains(needs.*.result, 'cancelled') - run: | - echo "One or more matrix job tests failed or were cancelled. You may need to re-run them." && exit 1 + - run: | + poetry run pytest tests/load + name: Run all tests Linux + if: ${{ contains(github.event.pull_request.labels.*.name, 'ci full') || github.event_name == 'schedule'}} From e61f68192ea5569e93e11c0ecb245c62cc3c1e89 Mon Sep 17 00:00:00 2001 From: Dave Date: Tue, 16 Apr 2024 16:19:34 +0200 Subject: [PATCH 088/127] add missing secrets to clickhouse workflow --- .github/workflows/test_destination_clickhouse.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/test_destination_clickhouse.yml b/.github/workflows/test_destination_clickhouse.yml index 002d427b3f..d834df6b28 100644 --- a/.github/workflows/test_destination_clickhouse.yml +++ b/.github/workflows/test_destination_clickhouse.yml @@ -17,6 +17,7 @@ concurrency: env: RUNTIME__SENTRY_DSN: https://6f6f7b6f8e0f458a89be4187603b55fe@o1061158.ingest.sentry.io/4504819859914752 RUNTIME__LOG_LEVEL: ERROR + DLT_SECRETS_TOML: ${{ secrets.DLT_SECRETS_TOML }} ACTIVE_DESTINATIONS: "[\"clickhouse\"]" ALL_FILESYSTEM_DRIVERS: "[\"memory\"]" @@ -63,6 +64,9 @@ jobs: - name: Install dependencies run: poetry install --no-interaction -E clickhouse --with providers -E parquet --with sentry-sdk --with pipeline + - name: create secrets.toml + run: pwd && echo "$DLT_SECRETS_TOML" > tests/.dlt/secrets.toml + - run: | poetry run pytest tests/load -m "essential" name: Run essential tests Linux From 027d52a726a958ef2c8af908e0772afcc391ee60 Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Tue, 16 Apr 2024 23:09:49 +0200 Subject: [PATCH 089/127] Add test for clickhouse config settings #1055 Signed-off-by: Marcel Coetzee --- .../impl/clickhouse/clickhouse.py | 1 + .../impl/clickhouse/sql_client.py | 11 +----- .../test_clickhouse_configuration.py | 38 ++++++++++++++++++- 3 files changed, 39 insertions(+), 11 deletions(-) diff --git a/dlt/destinations/impl/clickhouse/clickhouse.py b/dlt/destinations/impl/clickhouse/clickhouse.py index 3489c5cacb..8575c5a52c 100644 --- a/dlt/destinations/impl/clickhouse/clickhouse.py +++ b/dlt/destinations/impl/clickhouse/clickhouse.py @@ -254,6 +254,7 @@ def __init__( settings={ "allow_experimental_lightweight_delete": 1, "allow_experimental_object_type": 1, + "enable_http_compression": 1, }, compression=compression, ) diff --git 
a/dlt/destinations/impl/clickhouse/sql_client.py b/dlt/destinations/impl/clickhouse/sql_client.py index 5710e41e49..8fafa7ba74 100644 --- a/dlt/destinations/impl/clickhouse/sql_client.py +++ b/dlt/destinations/impl/clickhouse/sql_client.py @@ -14,7 +14,6 @@ from clickhouse_driver.dbapi import OperationalError # type: ignore[import-untyped] from clickhouse_driver.dbapi.extras import DictCursor # type: ignore[import-untyped] -import dlt from dlt.common.destination import DestinationCapabilitiesContext from dlt.destinations.exceptions import ( DatabaseUndefinedRelation, @@ -128,7 +127,7 @@ def _list_tables(self) -> List[str]: def execute_query( self, query: AnyStr, *args: Any, **kwargs: Any ) -> Iterator[ClickHouseDBApiCursorImpl]: - assert isinstance(query, str), "Query must be a string" + assert isinstance(query, str), "Query must be a string." db_args = kwargs.copy() @@ -136,14 +135,6 @@ def execute_query( query, db_args = _convert_to_old_pyformat(query, args, OperationalError) db_args.update(kwargs) - # Prefix each query transaction with experimental settings. - # These are necessary for nested datatypes to be available and other operations to work. - query = ( - "set allow_experimental_lightweight_delete = 1;" - "set allow_experimental_object_type = 1;" - "set enable_http_compression= 1;" - f"{query}" - ) with self._conn.cursor() as cursor: for query_line in query.split(";"): if query_line := query_line.strip(): diff --git a/tests/load/clickhouse/test_clickhouse_configuration.py b/tests/load/clickhouse/test_clickhouse_configuration.py index 61862170ed..5268c7bd22 100644 --- a/tests/load/clickhouse/test_clickhouse_configuration.py +++ b/tests/load/clickhouse/test_clickhouse_configuration.py @@ -1,11 +1,13 @@ -from typing import Any +from typing import Any, Iterator import pytest import dlt from dlt.common.configuration.resolve import resolve_configuration from dlt.common.libs.sql_alchemy import make_url +from dlt.common.storages import FileStorage from dlt.common.utils import digest128 +from dlt.destinations.impl.clickhouse.clickhouse import ClickHouseClient from dlt.destinations.impl.clickhouse.configuration import ( ClickHouseCredentials, ClickHouseClientConfiguration, @@ -15,6 +17,23 @@ SnowflakeCredentials, ) from tests.common.configuration.utils import environment +from tests.load.utils import yield_client_with_storage +from tests.utils import TEST_STORAGE_ROOT, delete_test_storage + + +@pytest.fixture +def file_storage() -> FileStorage: + return FileStorage(TEST_STORAGE_ROOT, file_type="b", makedirs=True) + + +@pytest.fixture(autouse=True) +def auto_delete_storage() -> None: + delete_test_storage() + + +@pytest.fixture(scope="function") +def client() -> Iterator[ClickHouseClient]: + yield from yield_client_with_storage("clickhouse") # type: ignore def test_clickhouse_connection_string_with_all_params() -> None: @@ -58,3 +77,20 @@ def test_clickhouse_gcp_hmac_getter_accessor(environment: Any) -> None: assert ( dlt.config["destination.filesystem.credentials.gcp_secret_access_key"] == "ascvntp45uasdf" ) + + +def test_clickhouse_connection_settings(client: ClickHouseClient) -> None: + """Test experimental settings are set correctly for session.""" + conn = client.sql_client.open_connection() + cursor1 = conn.cursor() + cursor2 = conn.cursor() + + cursors = [cursor1, cursor2] + + for cursor in cursors: + cursor.execute("SELECT name, value FROM system.settings") + res = cursor.fetchall() + + assert ("allow_experimental_lightweight_delete", "1") in res + assert 
("allow_experimental_object_type", "1") in res + assert ("enable_http_compression", "1") in res From ecfe173ce4671f9435a21c06859b424081704ff7 Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Wed, 17 Apr 2024 00:27:06 +0200 Subject: [PATCH 090/127] Set experimental session in DSN #1055 Signed-off-by: Marcel Coetzee --- .../impl/clickhouse/configuration.py | 4 ++++ dlt/destinations/impl/clickhouse/sql_client.py | 4 +--- .../test_clickhouse_configuration.py | 18 +++++------------- 3 files changed, 10 insertions(+), 16 deletions(-) diff --git a/dlt/destinations/impl/clickhouse/configuration.py b/dlt/destinations/impl/clickhouse/configuration.py index d6352a1467..71d9d1de38 100644 --- a/dlt/destinations/impl/clickhouse/configuration.py +++ b/dlt/destinations/impl/clickhouse/configuration.py @@ -67,6 +67,10 @@ def to_url(self) -> URL: ("connect_timeout", str(self.connect_timeout)), ("send_receive_timeout", str(self.send_receive_timeout)), ("secure", str(1) if self.secure else str(0)), + # Toggle experimental settings. These are necessary for certain datatypes and not optional. + ("allow_experimental_lightweight_delete", "1"), + ("allow_experimental_object_type", "1"), + ("enable_http_compression", "1"), ] ) return url diff --git a/dlt/destinations/impl/clickhouse/sql_client.py b/dlt/destinations/impl/clickhouse/sql_client.py index 8fafa7ba74..fdddf7de3c 100644 --- a/dlt/destinations/impl/clickhouse/sql_client.py +++ b/dlt/destinations/impl/clickhouse/sql_client.py @@ -57,9 +57,7 @@ def has_dataset(self) -> bool: return len(self._list_tables()) > 0 def open_connection(self) -> clickhouse_driver.dbapi.connection.Connection: - self._conn = clickhouse_driver.dbapi.connect( - dsn=self.credentials.to_native_representation() - ) + self._conn = clickhouse_driver.connect(dsn=self.credentials.to_native_representation()) return self._conn @raise_open_connection_error diff --git a/tests/load/clickhouse/test_clickhouse_configuration.py b/tests/load/clickhouse/test_clickhouse_configuration.py index 5268c7bd22..dcbf7e0935 100644 --- a/tests/load/clickhouse/test_clickhouse_configuration.py +++ b/tests/load/clickhouse/test_clickhouse_configuration.py @@ -5,7 +5,6 @@ import dlt from dlt.common.configuration.resolve import resolve_configuration from dlt.common.libs.sql_alchemy import make_url -from dlt.common.storages import FileStorage from dlt.common.utils import digest128 from dlt.destinations.impl.clickhouse.clickhouse import ClickHouseClient from dlt.destinations.impl.clickhouse.configuration import ( @@ -18,17 +17,6 @@ ) from tests.common.configuration.utils import environment from tests.load.utils import yield_client_with_storage -from tests.utils import TEST_STORAGE_ROOT, delete_test_storage - - -@pytest.fixture -def file_storage() -> FileStorage: - return FileStorage(TEST_STORAGE_ROOT, file_type="b", makedirs=True) - - -@pytest.fixture(autouse=True) -def auto_delete_storage() -> None: - delete_test_storage() @pytest.fixture(scope="function") @@ -37,7 +25,11 @@ def client() -> Iterator[ClickHouseClient]: def test_clickhouse_connection_string_with_all_params() -> None: - url = "clickhouse://user1:pass1@host1:9000/testdb?secure=0&connect_timeout=230&send_receive_timeout=1000" + url = ( + "clickhouse://user1:pass1@host1:9000/testdb?allow_experimental_lightweight_delete=1&" + "allow_experimental_object_type=1&connect_timeout=230&enable_http_compression=1&secure=0" + "&send_receive_timeout=1000" + ) creds = ClickHouseCredentials() creds.parse_native_representation(url) From 
6ca11c88e609cb9b5a897b9ea52823e9ab68a62a Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Wed, 17 Apr 2024 22:08:14 +0200 Subject: [PATCH 091/127] Update data mapping and test for ClickHouse Signed-off-by: Marcel Coetzee --- dlt/destinations/impl/clickhouse/clickhouse.py | 8 ++------ tests/load/test_job_client.py | 10 ++++++++-- 2 files changed, 10 insertions(+), 8 deletions(-) diff --git a/dlt/destinations/impl/clickhouse/clickhouse.py b/dlt/destinations/impl/clickhouse/clickhouse.py index 8575c5a52c..eebd3046fc 100644 --- a/dlt/destinations/impl/clickhouse/clickhouse.py +++ b/dlt/destinations/impl/clickhouse/clickhouse.py @@ -78,6 +78,7 @@ class ClickHouseTypeMapper(TypeMapper): "bool": "Boolean", "date": "Date", "timestamp": "DateTime('UTC')", + "time": "String", "bigint": "Int64", "binary": "String", "wei": "Decimal", @@ -86,7 +87,7 @@ class ClickHouseTypeMapper(TypeMapper): sct_to_dbt = { "decimal": "Decimal(%i,%i)", "wei": "Decimal(%i,%i)", - "timestamp": "DateTime(%i,'UTC')", + "timestamp": "DateTime64(%i,'UTC')", } dbt_to_sct = { @@ -95,16 +96,11 @@ class ClickHouseTypeMapper(TypeMapper): "Bool": "bool", "Date": "date", "DateTime": "timestamp", - "DateTime64": "timestamp", - "Time": "timestamp", "Int64": "bigint", "Object('json')": "complex", "Decimal": "decimal", } - def to_db_time_type(self, precision: Optional[int], table_format: TTableFormat = None) -> str: - return "DateTime" - def from_db_type( self, db_type: str, precision: Optional[int] = None, scale: Optional[int] = None ) -> TColumnType: diff --git a/tests/load/test_job_client.py b/tests/load/test_job_client.py index 471ea654b5..cfab290e09 100644 --- a/tests/load/test_job_client.py +++ b/tests/load/test_job_client.py @@ -516,7 +516,9 @@ def test_load_with_all_types( pytest.skip("preferred loader file format not set, destination will only work with staging") table_name = "event_test_table" + uniq_id() column_schemas, data_types = table_update_and_row( - exclude_types=["time"] if client.config.destination_type == "databricks" else None, + exclude_types=( + ["time"] if client.config.destination_type in ["databricks", "clickhouse"] else None + ), ) # we should have identical content with all disposition types client.schema.update_table( @@ -544,7 +546,11 @@ def test_load_with_all_types( expect_load_file(client, file_storage, query, table_name) db_row = list(client.sql_client.execute_sql(f"SELECT * FROM {canonical_name}")[0]) # content must equal - assert_all_data_types_row(db_row, schema=column_schemas) + assert_all_data_types_row( + db_row, + schema=column_schemas, + allow_base64_binary=True if client.config.destination_type in ["clickhouse"] else False, + ) @pytest.mark.parametrize( From 0cf5f63c6b0920f378dd4a4eae18f702e4963eef Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Wed, 17 Apr 2024 22:24:46 +0200 Subject: [PATCH 092/127] Revert previous Signed-off-by: Marcel Coetzee --- dlt/destinations/impl/clickhouse/clickhouse.py | 5 +++++ tests/load/test_job_client.py | 1 - 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/dlt/destinations/impl/clickhouse/clickhouse.py b/dlt/destinations/impl/clickhouse/clickhouse.py index eebd3046fc..5c8d710880 100644 --- a/dlt/destinations/impl/clickhouse/clickhouse.py +++ b/dlt/destinations/impl/clickhouse/clickhouse.py @@ -96,11 +96,16 @@ class ClickHouseTypeMapper(TypeMapper): "Bool": "bool", "Date": "date", "DateTime": "timestamp", + "DateTime64": "timestamp", + "Time": "timestamp", "Int64": "bigint", "Object('json')": "complex", "Decimal": "decimal", } + def 
to_db_time_type(self, precision: Optional[int], table_format: TTableFormat = None) -> str: + return "DateTime" + def from_db_type( self, db_type: str, precision: Optional[int] = None, scale: Optional[int] = None ) -> TColumnType: diff --git a/tests/load/test_job_client.py b/tests/load/test_job_client.py index cfab290e09..4bf15bbde7 100644 --- a/tests/load/test_job_client.py +++ b/tests/load/test_job_client.py @@ -394,7 +394,6 @@ def test_get_storage_table_with_all_types(client: SqlJobClientBase) -> None: if client.config.destination_type == "databricks" and c["data_type"] in ("complex", "time"): continue # ClickHouse has no active data type for binary or time type. - # TODO: JSON type is available, but not nullable in ClickHouse. if client.config.destination_type == "clickhouse": if c["data_type"] in ("binary", "time"): continue From e5500ced3a1f3c00f7064a0f8ca082c2fac59319 Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Thu, 18 Apr 2024 00:50:32 +0200 Subject: [PATCH 093/127] Fix table aliasing issue Signed-off-by: Marcel Coetzee --- dlt/destinations/impl/clickhouse/clickhouse.py | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/dlt/destinations/impl/clickhouse/clickhouse.py b/dlt/destinations/impl/clickhouse/clickhouse.py index 5c8d710880..9c1bc5936f 100644 --- a/dlt/destinations/impl/clickhouse/clickhouse.py +++ b/dlt/destinations/impl/clickhouse/clickhouse.py @@ -285,7 +285,20 @@ def exception(self) -> str: class ClickHouseMergeJob(SqlMergeJob): @classmethod def _to_temp_table(cls, select_sql: str, temp_table_name: str) -> str: - return f"CREATE TEMPORARY TABLE {temp_table_name} AS {select_sql};" + return f"CREATE TABLE {temp_table_name} ENGINE = Memory AS {select_sql};" + + @classmethod + def gen_key_table_clauses( + cls, + root_table_name: str, + staging_root_table_name: str, + key_clauses: Sequence[str], + for_delete: bool, + ) -> List[str]: + join_conditions = " AND ".join([c.format(d="d", s="s") for c in key_clauses]) + return [ + f"FROM {root_table_name} AS d JOIN {staging_root_table_name} AS s ON {join_conditions}" + ] class ClickHouseClient(SqlJobClientWithStaging, SupportsStagingDestination): From 8830f3ef9b1b543708e0ba847bcbd5d5223f5fa0 Mon Sep 17 00:00:00 2001 From: Dave Date: Thu, 18 Apr 2024 15:09:29 +0200 Subject: [PATCH 094/127] remove additional clickhouse destinations from test setup --- tests/load/utils.py | 32 -------------------------------- 1 file changed, 32 deletions(-) diff --git a/tests/load/utils.py b/tests/load/utils.py index f49b8bf3cb..6156b33242 100644 --- a/tests/load/utils.py +++ b/tests/load/utils.py @@ -341,29 +341,6 @@ def destinations_configs( destination="clickhouse", staging="filesystem", file_format="parquet", - bucket_url=AWS_BUCKET, - extra_info="s3-authorization", - disable_compression=True, - ), - DestinationTestConfiguration( - destination="clickhouse", - staging="filesystem", - file_format="parquet", - bucket_url=AZ_BUCKET, - extra_info="az-authorization", - ), - DestinationTestConfiguration( - destination="clickhouse", - staging="filesystem", - file_format="parquet", - bucket_url=AZ_BUCKET, - extra_info="az-authorization", - disable_compression=True, - ), - DestinationTestConfiguration( - destination="clickhouse", - staging="filesystem", - file_format="jsonl", bucket_url=AZ_BUCKET, extra_info="az-authorization", ), @@ -373,7 +350,6 @@ def destinations_configs( file_format="jsonl", bucket_url=AZ_BUCKET, extra_info="az-authorization", - disable_compression=True, ), DestinationTestConfiguration( 
destination="clickhouse", @@ -389,14 +365,6 @@ def destinations_configs( bucket_url=AWS_BUCKET, extra_info="s3-authorization", ), - DestinationTestConfiguration( - destination="clickhouse", - staging="filesystem", - file_format="jsonl", - bucket_url=AWS_BUCKET, - disable_compression=True, - extra_info="s3-authorization", - ), DestinationTestConfiguration( destination="dremio", staging=filesystem(destination_name="minio"), From 91c257d5fef0f2e5411a21cf4936f888c4d44100 Mon Sep 17 00:00:00 2001 From: Dave Date: Thu, 18 Apr 2024 15:23:21 +0200 Subject: [PATCH 095/127] fix lockfile --- poetry.lock | 546 ++++++++++++++++++++++++++++------------------------ 1 file changed, 294 insertions(+), 252 deletions(-) diff --git a/poetry.lock b/poetry.lock index 257714ad6a..48876a8303 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. [[package]] name = "about-time" @@ -1670,6 +1670,218 @@ files = [ click = ">=4.0" PyYAML = ">=3.11" +[[package]] +name = "clickhouse-connect" +version = "0.7.8" +description = "ClickHouse Database Core Driver for Python, Pandas, and Superset" +optional = true +python-versions = "~=3.8" +files = [ + {file = "clickhouse-connect-0.7.8.tar.gz", hash = "sha256:dad10ba90eabfe215dfb1fef59f2821a95c752988e66f1093ca8590a51539b8f"}, + {file = "clickhouse_connect-0.7.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5319d4a6f5a3484d20ba28954aaf417a3c37aa874877ed64fb635632025b4987"}, + {file = "clickhouse_connect-0.7.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7e326370b0930dffb77d302f9ffbf418779f91c76b8386906e945b7986b625cf"}, + {file = "clickhouse_connect-0.7.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cbb77a32f7cb5178f625cecef3c7924a8896e1273bb3c1c0819c64b60ab96347"}, + {file = "clickhouse_connect-0.7.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb302c45d79411ae862bf894a3bbc8fd2808c016bfe491db2c957e2803f71d62"}, + {file = "clickhouse_connect-0.7.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d5fd5f860ab28aa7096f79a866082251627218e5d994800dda6b4b860a5efc1"}, + {file = "clickhouse_connect-0.7.8-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:1b3df18dee46483cc982043f0ddccd301cd7eb1fc01a7f292ba62f41640f8d4f"}, + {file = "clickhouse_connect-0.7.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b09dfcf48926b834090408b94d8be361c42b54fa892c29174b5883b34859577e"}, + {file = "clickhouse_connect-0.7.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9629122594f628aaa1188c6924298d3d7c36534de4241a58a68716da433ef07"}, + {file = "clickhouse_connect-0.7.8-cp310-cp310-win32.whl", hash = "sha256:7982e8c35a80c49e13c33889e7944ba265baf0e95e3b66db3d01efe5e1de4408"}, + {file = "clickhouse_connect-0.7.8-cp310-cp310-win_amd64.whl", hash = "sha256:38024602cfe9c8889e14d75f99f51e70352ddefbe3db5bee15300e6888a7397f"}, + {file = "clickhouse_connect-0.7.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:073c550b8433e31955a3468013147f339ad03650929467b73b120c8a788aa64a"}, + {file = "clickhouse_connect-0.7.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f34829a5539b4643247e4c870ee04f9f03423ff35fecb22e0e5723d724f50986"}, + {file = "clickhouse_connect-0.7.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:959c3f324666ab97ccc324a0cf8b1c9eb2ecfcae32d05b3afc43f85b22aa486f"}, + {file = "clickhouse_connect-0.7.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20905d467c61b334fe20af9432f06ff8ddd706eb9fba4596a7f629ae58897a36"}, + {file = "clickhouse_connect-0.7.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f4193160c1a3bdd9569d407ec00d31c2c2c7f3748c97721005da496f609579b"}, + {file = "clickhouse_connect-0.7.8-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f77052d16947eb30b694b3627d8b3fd313976c26692a80ab45e2fcf9b7dfe156"}, + {file = "clickhouse_connect-0.7.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6e18df3466158bfaba9d5f52297701b31fa3d589a039392482a9a6be5341da17"}, + {file = "clickhouse_connect-0.7.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6a2cfe42fca283c5f2e2d20cf79808c097c576d5a1f37e4b2870dcb39cea16c2"}, + {file = "clickhouse_connect-0.7.8-cp311-cp311-win32.whl", hash = "sha256:d964dc6e270f1f0d38cf2a780608329b5140877fb556f4c45bdf349900f5ebc8"}, + {file = "clickhouse_connect-0.7.8-cp311-cp311-win_amd64.whl", hash = "sha256:344500947233ddc429fb4653407b063d3d4efae52a4d508ff38118ec7c8b9afb"}, + {file = "clickhouse_connect-0.7.8-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:36ee57a5ab2d755f1ce0b05c1e1170c0cd23a22222b7af3394d1069552d33b01"}, + {file = "clickhouse_connect-0.7.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:02792590218e2b106a0dc87b0d21ab895c1643e9a62993b0b9922ac6a97ed0f5"}, + {file = "clickhouse_connect-0.7.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5983630c818ad87fba29cfd800880673263c1f5cfa8e199f6617d1ad5d041169"}, + {file = "clickhouse_connect-0.7.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a48eecc60fd522a76d03737063b14affb8b5e77a8446222da838857a91fce467"}, + {file = "clickhouse_connect-0.7.8-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5e65271cc53099ca10cb0fa0ba24c6e35b82865ae58915e3d503584c13620ed0"}, + {file = "clickhouse_connect-0.7.8-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:29c6a031beeba8b5bdaaec06abf061a5a2ca60652f2c7d7f3dad99b4cfd9ccde"}, + {file = "clickhouse_connect-0.7.8-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:10c7780e8bae624f6fc5aa25cccf44c0d56a91d5c02511fedfc02befa3a4cba1"}, + {file = "clickhouse_connect-0.7.8-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:04cc78a342511798cd24a8e9304930a19057d775c6b91cd83972b3cab66d28a2"}, + {file = "clickhouse_connect-0.7.8-cp312-cp312-win32.whl", hash = "sha256:69ff583442b832a54919e5d475b3392f580e949b461e0b60eb993609075c1a90"}, + {file = "clickhouse_connect-0.7.8-cp312-cp312-win_amd64.whl", hash = "sha256:31662144b8c75db19789825effe18b1e380fa5c479846bc3b81690ecf28fd35b"}, + {file = "clickhouse_connect-0.7.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fc30ac90e5c660688f84da35434a2007fd7d846600e3c4039e4c38616c05575a"}, + {file = "clickhouse_connect-0.7.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a993bb9b0be4ba2e847bdf4ea1851fe9e5d0a2124a3bfd0bd97d1d376154d081"}, + {file = "clickhouse_connect-0.7.8-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0102bf3c5d84ef5cda05cc5c4de05992d0e5e46a992188199a08bdc36fbe94a3"}, + {file = "clickhouse_connect-0.7.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c08c09842873851b9b02df27f056da76f49aa777b1bd76108e386cd2c75fc0fa"}, + {file = 
"clickhouse_connect-0.7.8-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b5a867e0a6556a7e6e7ef63c9c31561a47f5e2067758a89f583fbe03d093bd11"}, + {file = "clickhouse_connect-0.7.8-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ee9c165fe9149dbeb07a735bce4fbd928485dcf25ff0401b687fe9c64e1f4c38"}, + {file = "clickhouse_connect-0.7.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7e9fbb25ec59b1a8b306dbd448c458c722fab6f718adcfd2434113f1d2d12823"}, + {file = "clickhouse_connect-0.7.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c985706917d870164b385e932041dd7a396acf41fc79ffd79cf04fe5153667d6"}, + {file = "clickhouse_connect-0.7.8-cp38-cp38-win32.whl", hash = "sha256:7f6a1ffd05f18b599432d763d8f94b9e27023d4dd6a45419d4bce9bb94a8b7a8"}, + {file = "clickhouse_connect-0.7.8-cp38-cp38-win_amd64.whl", hash = "sha256:9fc821567221eea914b0bc79cd46d25b95687585e35514bd4ab20e10ac688e67"}, + {file = "clickhouse_connect-0.7.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a2ba42da85cabf8077f84f06761009fb99e374eb72a9f76e5d5e3e5b796a53ac"}, + {file = "clickhouse_connect-0.7.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:15cc99ab4dcbc850dd541796a8310c273a9d18f8ebfc6dedd71884101c4a20a1"}, + {file = "clickhouse_connect-0.7.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20e9832d3a6a2052cd1531463c492ac5175d17ff7c0e2129f0ca5525cc4f58a6"}, + {file = "clickhouse_connect-0.7.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a74905ada295fe4ef8788b8fd8ebfa92277da0519535d3d379a8a6a4f3f6fc86"}, + {file = "clickhouse_connect-0.7.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:28eca36b4b399e3e419b159e015f21a64545b75cf9a2515e5e5d8ef44fff1dba"}, + {file = "clickhouse_connect-0.7.8-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0015aeaecffef53c725a76963cfd60f74947dce82b7df525fba9bd5c8f096182"}, + {file = "clickhouse_connect-0.7.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:46f90b12dd9d289fba9dcca0c49f38dc4ab34135b63f93e549cd859c3f59acf1"}, + {file = "clickhouse_connect-0.7.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:460d56e33d66e1cca036b4adbba0008c897852cd21f6a3b4c32fcce8739d68f8"}, + {file = "clickhouse_connect-0.7.8-cp39-cp39-win32.whl", hash = "sha256:f945c97d6adaa9bfa12ce707bab91601a3c92d6548d0d6c0e3473413094d2e90"}, + {file = "clickhouse_connect-0.7.8-cp39-cp39-win_amd64.whl", hash = "sha256:9000dbf2d3130d008488d41cfa72c9b91c14a95eed28efc142ff8dd02d753042"}, + {file = "clickhouse_connect-0.7.8-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1c0467f4b7726389c908cc8b1c700aa86c888b5cad51715285122ff40526a16c"}, + {file = "clickhouse_connect-0.7.8-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb7c302ff0f8224347ceb9ed047ee0ef0d264dd338b096e7432972015751aab1"}, + {file = "clickhouse_connect-0.7.8-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f41c91b3a4342050232a1e513ab673e12d8b46105a3d0ab9a588c718d5108a65"}, + {file = "clickhouse_connect-0.7.8-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:92dba0b57d7e56be04f4906668af8852c1fbd1c73abac5a72bba46d496ead6c9"}, + {file = "clickhouse_connect-0.7.8-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:9bfd180121d4fd18d35e00134c8006cfbbbd73f52cf7709e25e64c0e3e0f18cc"}, + {file = 
"clickhouse_connect-0.7.8-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:bd0caa30d5d2ecf64337d43086064ccf6abd21d66e2bc62b41233e6f603598ac"}, + {file = "clickhouse_connect-0.7.8-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdd5babe7e6b6815afe48c162b1b84b365eb8e35eb6371df0880a71b55d18aa2"}, + {file = "clickhouse_connect-0.7.8-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ac22b8615db9a028d7be0afadf2655f377a243922b381d69595b3a4f6ebd830"}, + {file = "clickhouse_connect-0.7.8-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a2584336f70f06e50bfd274827bdac9da1a94c98e90ae2f49f612d22bcfd202"}, + {file = "clickhouse_connect-0.7.8-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:668ff0a87e06f18cc45d8fa3dcf2868cfa9d108f695fd2222c6b7efb23d66eba"}, + {file = "clickhouse_connect-0.7.8-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e3ec29fd00421a0832ad03b876d33982df87d3f4d15858d0d0bd5cd366301c4c"}, + {file = "clickhouse_connect-0.7.8-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09d1f7ed3d24e6b6ee26b8e3a47fccc3f86da366287728acaf1e38cfe587a78d"}, + {file = "clickhouse_connect-0.7.8-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8467d9f9dab8869d7ca7d670b27abbb47393f69c0074060833e9fc01ed939ee5"}, + {file = "clickhouse_connect-0.7.8-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7282b19857f154ffbe2216c702a60b9a6aca5ae275ed6476fa4d12e18fba0941"}, + {file = "clickhouse_connect-0.7.8-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:42880228bd4c1e499543d29c667159e7eccc4011ed6ceea7a32289d2a3b981a0"}, +] + +[package.dependencies] +certifi = "*" +lz4 = "*" +pytz = "*" +urllib3 = ">=1.26" +zstandard = "*" + +[package.extras] +arrow = ["pyarrow"] +numpy = ["numpy"] +orjson = ["orjson"] +pandas = ["pandas"] +sqlalchemy = ["sqlalchemy (>1.3.21,<2.0)"] +tzlocal = ["tzlocal"] + +[[package]] +name = "clickhouse-driver" +version = "0.2.7" +description = "Python driver with native interface for ClickHouse" +optional = true +python-versions = ">=3.7, <4" +files = [ + {file = "clickhouse-driver-0.2.7.tar.gz", hash = "sha256:299cfbe6d561955d88eeab6e09f3de31e2f6daccc6fdd904a59e46357d2d28d9"}, + {file = "clickhouse_driver-0.2.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c44fefc2fd44f432d5b162bfe34ad76840137c34167d46a18c554a7c7c6e3566"}, + {file = "clickhouse_driver-0.2.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e018452a7bf8d8c0adf958afbc5b0d29e402fc09a1fb34e9186293eae57f3b4e"}, + {file = "clickhouse_driver-0.2.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ff8b09f8b13df28d2f91ee3d0d2edd9589cbda76b74acf60669112219cea8c9d"}, + {file = "clickhouse_driver-0.2.7-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:54aa91c9512fd5a73f038cae4f67ca2ff0b2f8a84de846179a31530936ef4e20"}, + {file = "clickhouse_driver-0.2.7-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8342a7ba31ccb393ee31dfd61173aa84c995b4ac0b44d404adc8463534233d5"}, + {file = "clickhouse_driver-0.2.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:199000f8adf38fade0b5a52c273a396168105539de741a18ba3e68d7fc06e0e6"}, + {file = "clickhouse_driver-0.2.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:f60a2a40602b207506e505cfb184a81cd4b752bde17153bc0b32c3931ddb792f"}, + {file = "clickhouse_driver-0.2.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5db3a26b18146b2b0b06d3f32ce588af5afaa38c719daf6f9606981514228a8b"}, + {file = "clickhouse_driver-0.2.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5579a31da1f3cf49630e43fbbb11cab891b78161abdcb33908b79820b7cd3a23"}, + {file = "clickhouse_driver-0.2.7-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:cc39f0fb761aed96917b0f55679174a50f9591afc0e696e745cd698ef822661f"}, + {file = "clickhouse_driver-0.2.7-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:9aa0f7c740e4e61886c6d388792c5d1a2084d4b5462e6dcfc24e30ca7e7f8e68"}, + {file = "clickhouse_driver-0.2.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2caee88b6eec7b33ddbccd24501ad99ff8ff2b0a6a4471945cbfb28947a9a791"}, + {file = "clickhouse_driver-0.2.7-cp310-cp310-win32.whl", hash = "sha256:a4aef432cc7120a971eebb7ca2fddac4472e810b57e403d3a371b0c69cbb2bb0"}, + {file = "clickhouse_driver-0.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:f307de7df6bc23ad5ec8a1ba1db157f4d14de673ddd4798f37790f23255605b0"}, + {file = "clickhouse_driver-0.2.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cbf3ca8919bf856ca6588669a863065fb732a32a6387095f64d19038fd99db9f"}, + {file = "clickhouse_driver-0.2.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ab68b3d9b9d1386adfd3a57edd47b62858a145bf7ccc7f11b31d308195d966e5"}, + {file = "clickhouse_driver-0.2.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:985a9d60044c5ad39c6e018b852c7105ec4ebfdf4c3abe23183b4867454e570a"}, + {file = "clickhouse_driver-0.2.7-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6c94330054c8d92d2286898906f843f26e2f96fc2aa11a9a96a7b5593d299bf0"}, + {file = "clickhouse_driver-0.2.7-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:92938f55c8f797e50e624a4b96e685178d043cdf0ede306a7fd4e7dda19b8dfd"}, + {file = "clickhouse_driver-0.2.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05bd53e9bf49c3013d06f9e6d2812872d44b150f7a2d1cf18e1498257d42330e"}, + {file = "clickhouse_driver-0.2.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f1f8ed5404e283a9ded499c33eade2423fdc15e31f8a711d75e91f890d0f70b"}, + {file = "clickhouse_driver-0.2.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a398085e4a1766d907ac32c282d4172db38a44243bde303372396208d1cbf4bb"}, + {file = "clickhouse_driver-0.2.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:fa1808593123b6056f93808f0afbc7938f06a8149cb4e381aa7b1a234c1d3c18"}, + {file = "clickhouse_driver-0.2.7-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:0512d54ae23bd4a69278e04f42b651d7c71b63ba6043e2c6bd97b11329692f99"}, + {file = "clickhouse_driver-0.2.7-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5bc2b67e7e68f74ccebf95a8b3a13f13a7c34b89b32c9813103221de14c06c8b"}, + {file = "clickhouse_driver-0.2.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:04a37cdafc671cb796af3e566cef0aeb39111d82aebeecd9106a049434953b26"}, + {file = "clickhouse_driver-0.2.7-cp311-cp311-win32.whl", hash = "sha256:019538c7c23e976538e5081dd2f77a8a40bf663c638a62d857ff05f42b0c9052"}, + {file = "clickhouse_driver-0.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:5166643683584bc53fcadda73c65f6a9077feb472f3d167ecef1a1a7024973aa"}, + {file = "clickhouse_driver-0.2.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:59affab7b5a3c4aab5b6a730f606575efdefea213458de2eb14927ee4e0640f4"}, + {file = "clickhouse_driver-0.2.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:dcb93dd07fe65ac4f1a2bc0b8967911d4ad2152dbee000f025ea5cb575da5ecb"}, + {file = "clickhouse_driver-0.2.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55a48019b79181ae1ca90e980e74c5d413c3f8829f6744e2b056646c2d435a1a"}, + {file = "clickhouse_driver-0.2.7-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:507463c9157240fd7c3246781e8c30df8db3c80bf68925b36ff3ad4a80c4b924"}, + {file = "clickhouse_driver-0.2.7-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1e2d8d2295ee9e0cfab8ad77cb635a05da2160334b4f16ed8c3d00fbf39a2343"}, + {file = "clickhouse_driver-0.2.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e38c44546dcdb956b5ab0944cb3d51e8c98f816e75bab1a2254c478865bc6e7b"}, + {file = "clickhouse_driver-0.2.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e6690a2bdd9e7531fe50b53193279f8b35cbcd5c5ee36c0fcc112518a7d24f16"}, + {file = "clickhouse_driver-0.2.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bc6b4ba0a6467fd09021aa1d87a44fb4589600d61b010fca41e0dfffd0dee322"}, + {file = "clickhouse_driver-0.2.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:254bbd400eb87ff547a08755bc714f712e11f7a6d3ebbbb7aaa1dd454fb16d44"}, + {file = "clickhouse_driver-0.2.7-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7bbbe3f8b87fc1489bc15fa9c88cc9fac9d4d7d683d076f058c2c83e6ee422fd"}, + {file = "clickhouse_driver-0.2.7-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:745e5b18f0957d932151527f1523d0e516c199de8c589638e5f55ab2559886f3"}, + {file = "clickhouse_driver-0.2.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:0fa0357fb5f26149e3df86a117d3678329b85d8827b78a5a09bbf224d8dd4541"}, + {file = "clickhouse_driver-0.2.7-cp312-cp312-win32.whl", hash = "sha256:ace652af7ca94ba3cb3a04a5c363e135dc5009f31d8201903e21db9d5daf2358"}, + {file = "clickhouse_driver-0.2.7-cp312-cp312-win_amd64.whl", hash = "sha256:c0ba68489544df89e4138a14b0ec3e1e5eb102d5d3283a91d9b837c420c0ab97"}, + {file = "clickhouse_driver-0.2.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:66267e4ba21fa66c97ce784a5de2202d3b7d4db3e50bfcdde92830a68f6fae30"}, + {file = "clickhouse_driver-0.2.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6cf55c285b75c178487407721baef4980b3c6515c9c0c1a6c1ea8b001afe658e"}, + {file = "clickhouse_driver-0.2.7-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:deeb66bb56490db2157f199c6d9aa2c53f046677be430cc834fc1e74eec6e654"}, + {file = "clickhouse_driver-0.2.7-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dfe5b4020939abeeb407b4eead598c954b1573d2d2b4f174f793b196d378b9d9"}, + {file = "clickhouse_driver-0.2.7-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84d39506b5f8d86a1195ebde1c66aba168f34ebce6ebd828888f0625cac54774"}, + {file = "clickhouse_driver-0.2.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f93a27db2dcbbd3ecad36e8df4395d047cb7410e2dc69f6d037674e15442f4ee"}, + {file = "clickhouse_driver-0.2.7-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ebc29e501e47ecbfd44c89c0e5c87b2a722049d38b9e93fdd4bea510a82e16ac"}, + {file = "clickhouse_driver-0.2.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = 
"sha256:f9cc8c186fea09a94d89e5c9c4e8d05ec3a80e2f6d25673c48efec8117a13cfc"}, + {file = "clickhouse_driver-0.2.7-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:0757dfde5410c42230b24825ea3ab904a78160520e5ceb953482e133e368733b"}, + {file = "clickhouse_driver-0.2.7-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c9f88818cf411f928c29ba295c677cd95773bd256b8490f5655fb489e0c6658c"}, + {file = "clickhouse_driver-0.2.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e19952f158ebe274c65ffeb294ba378d75048a48f31b77573948d606bed019d5"}, + {file = "clickhouse_driver-0.2.7-cp37-cp37m-win32.whl", hash = "sha256:008b1f32c7c68564de8051482b72a5289b6933bca9d9b1ad1474dd448d6768ba"}, + {file = "clickhouse_driver-0.2.7-cp37-cp37m-win_amd64.whl", hash = "sha256:622933cc9834c39f03de5d43a12f13fc7133d31d6d2597e67866d4a549ca9e60"}, + {file = "clickhouse_driver-0.2.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:92540581e5b5f36d915f14d05c30244870fb123c74b38c645fa47663053c5471"}, + {file = "clickhouse_driver-0.2.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:02dfadc6111b64e01c20b8c11266cab97d4f06685a392a183af437f2f1afb990"}, + {file = "clickhouse_driver-0.2.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3ca17fece86fe85d97705024bec881978271931b3d00db273c9d63244f7d606"}, + {file = "clickhouse_driver-0.2.7-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:76474f1315ca3ab484ae28ad085b8f756c8b9a755882f93912b2149290482033"}, + {file = "clickhouse_driver-0.2.7-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f5c0ff12368b34aaf58dd948b0819e5b54d261911de334d3f048328dc9354013"}, + {file = "clickhouse_driver-0.2.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cd441b17294e90e313b08fabf84fcc782c191d2b9b2a924f163928202db6fcc"}, + {file = "clickhouse_driver-0.2.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62aa158f61d7d84c58e8cd75b3b8340b28607e5a70132395078f578d518aaae3"}, + {file = "clickhouse_driver-0.2.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:bcb2a39a1fef8bf1b581f06125c2a84a5b92c939b079d1a95126e3207b05dc77"}, + {file = "clickhouse_driver-0.2.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1f29cc641a65e89a51a15f6d195f565ad2761d1bd653408c6b4046c987c5fb99"}, + {file = "clickhouse_driver-0.2.7-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ac1a43690696bda46c9a23fc6fd79b6fe22d428a18e880bdbdf5e6aeb31008c5"}, + {file = "clickhouse_driver-0.2.7-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:1dd5ea4584c42f85d96ddfa7d07da2abb35a797c45e4d3a66ace149ee4977cad"}, + {file = "clickhouse_driver-0.2.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a736c0af858a3c83af03848b18754ab18dc594cc7f3bf6be0b1fac682def182c"}, + {file = "clickhouse_driver-0.2.7-cp38-cp38-win32.whl", hash = "sha256:6cb8ca47f5818c1bc5814b9ff775e383f3c50059b1fd28a02cb9be1b666929f8"}, + {file = "clickhouse_driver-0.2.7-cp38-cp38-win_amd64.whl", hash = "sha256:a90e7dc92985669a5e6569356bb3028d9d475f95006d4487cb0789aa53f9489c"}, + {file = "clickhouse_driver-0.2.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:04b77cd6c583da9135db4a62c5a7999ae248c2dbfc0cb8e8a3d8a853b1fbfa11"}, + {file = "clickhouse_driver-0.2.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c7671f8c0e8960d766b2e0eaefcae3088fccdd3920e9cd3dee8e344cfd0a6929"}, + {file = "clickhouse_driver-0.2.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:502d7cd28522b95a399e993ffd48487e8c12c50ce2d4e89b77b938f945304405"}, + {file = "clickhouse_driver-0.2.7-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:969739279f4010e7b5b6b2c9d2ab56a463aed11fdaed5e02424c1b3915f144f8"}, + {file = "clickhouse_driver-0.2.7-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ed34b60f741eeb02407ea72180d77cbfc368c1be6fc2f2ff8319d1856ce67e10"}, + {file = "clickhouse_driver-0.2.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a667b48927f4420eb8c03fa33369edfbdf359a788897a01ac945263a2a611461"}, + {file = "clickhouse_driver-0.2.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f93aa3a90f3847872d7464ec9076482b2e812c4e7d61682daedffdf3471be00"}, + {file = "clickhouse_driver-0.2.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:190890667215691fdf2155c3b233b39146054ab1cd854c7d91221e6ed633d71e"}, + {file = "clickhouse_driver-0.2.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ff280aeac5e96c764cd31ba1077c95601337b9a97fb0b9ed4d24c64431f2c322"}, + {file = "clickhouse_driver-0.2.7-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:01e63e35d2ab55b8eb48facf6e951968c80d27ee6703aa6c91c73d9d0a4d0efe"}, + {file = "clickhouse_driver-0.2.7-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:a29fb24b910dafc8c11ba882797d13ec0323a97dce80a57673116fa893d1b669"}, + {file = "clickhouse_driver-0.2.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5f229a7853fc767e63143ea69889d49f6fd5623adc2f7b0f7eb360117d7e91a5"}, + {file = "clickhouse_driver-0.2.7-cp39-cp39-win32.whl", hash = "sha256:b7f34ad2ed509f48f8ed1f9b96e89765173a7b35d286c7350aa85934a11c0f49"}, + {file = "clickhouse_driver-0.2.7-cp39-cp39-win_amd64.whl", hash = "sha256:78b166597afbe490cc0cdac44fed8c8b81668f87125601dda17b154f237eef5d"}, + {file = "clickhouse_driver-0.2.7-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:16ab64beb8d079cb9b3200539539a35168f524eedf890c9acefb719e25bdc96e"}, + {file = "clickhouse_driver-0.2.7-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03e28fd50fc7c54874bf8e638a2ea87f73ae35bfbbf90123fdb395f38d62f159"}, + {file = "clickhouse_driver-0.2.7-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0677b8350acd8d186b6acd0026b62dd262d6fee428a5fa3ad9561908d4b02c39"}, + {file = "clickhouse_driver-0.2.7-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a2f3c9e2182809131701bb28a606dec90525c7ab20490714714a4b3eb015454b"}, + {file = "clickhouse_driver-0.2.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:e03a1a1b30cc58c9bd2cbe25bf5e40b1f1d16d52d44ddefb3af50435d1ed613c"}, + {file = "clickhouse_driver-0.2.7-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a1be8081306a4beb12444ed8e3208e1eb6c01ed207c471b33009c13504c88139"}, + {file = "clickhouse_driver-0.2.7-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:933b40722cbca9b1123a5bb2fb4bafafd234deae0f3481125cb6b6fa1d39aa84"}, + {file = "clickhouse_driver-0.2.7-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3054b5022f9bf15a5f4663a7cd190f466e70a2d7b8d45429d8742c515b556c10"}, + {file = "clickhouse_driver-0.2.7-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:61744760ee046c9a268cb801ca21bfe44c4873db9901a7cd0f3ca8830205feff"}, + {file = 
"clickhouse_driver-0.2.7-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:5e28427e05a72e7a4c3672e36703a2d80107ee0b3ab537e3380d726c96b07821"}, + {file = "clickhouse_driver-0.2.7-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c483f5ec836ae87803478f2a7b9daf15343078edd6a8be7364dd9db64905bbd0"}, + {file = "clickhouse_driver-0.2.7-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28220b794874e68bc2f06dbfff5748f1c5a3236922f59e127abd58d44ae20a3f"}, + {file = "clickhouse_driver-0.2.7-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c09877b59b34d5b3043ad70ec31543173cac8b64b4a8afaa89416b22fb28da5"}, + {file = "clickhouse_driver-0.2.7-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3580f78db27119f7380627873214ae1342066f1ecb35700c1d7bf418dd70ae73"}, + {file = "clickhouse_driver-0.2.7-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:0842ac1b2f7a9ca46dac2027849b241bccd8eb8ff1c59cb0a5874042b267b733"}, + {file = "clickhouse_driver-0.2.7-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7a3fb585e2d3514196258a4a3b0267510c03477f3c2380239ade4c056ba689a7"}, + {file = "clickhouse_driver-0.2.7-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48ea25287566d45efbaee0857ad25e8b33ffd7fd73e89424d79fe7f532962915"}, + {file = "clickhouse_driver-0.2.7-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee4a4935667b59b4816a5ca77300f5dbe5a7416860551d17376426b8fefc1175"}, + {file = "clickhouse_driver-0.2.7-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:358058cfceea9b43c4af9de81842563746f16984b34525a15b41eacf8fc2bed2"}, + {file = "clickhouse_driver-0.2.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:ae760fb843dec0b5c398536ca8dfaf243f494ba8fc68132ae1bd62004b0c396a"}, +] + +[package.dependencies] +pytz = "*" +tzlocal = "*" + +[package.extras] +lz4 = ["clickhouse-cityhash (>=1.0.2.1)", "lz4", "lz4 (<=3.0.1)"] +numpy = ["numpy (>=1.12.0)", "pandas (>=0.24.0)"] +zstd = ["clickhouse-cityhash (>=1.0.2.1)", "zstd"] + [[package]] name = "colorama" version = "0.4.6" @@ -3357,214 +3569,6 @@ files = [ {file = "google_re2-1.1-1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9c6c9f64b9724ec38da8e514f404ac64e9a6a5e8b1d7031c2dadd05c1f4c16fd"}, {file = "google_re2-1.1-1-cp39-cp39-win32.whl", hash = "sha256:d1b751b9ab9f8e2ab2a36d72b909281ce65f328c9115a1685acae1a2d1afd7a4"}, {file = "google_re2-1.1-1-cp39-cp39-win_amd64.whl", hash = "sha256:ac775c75cec7069351d201da4e0fb0cae4c1c5ebecd08fa34e1be89740c1d80b"}, - {file = "google_re2-1.1-2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5eaefe4705b75ca5f78178a50104b689e9282f868e12f119b26b4cffc0c7ee6e"}, - {file = "google_re2-1.1-2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:e35f2c8aabfaaa4ce6420b3cae86c0c29042b1b4f9937254347e9b985694a171"}, - {file = "google_re2-1.1-2-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:35fd189cbaaaa39c9a6a8a00164c8d9c709bacd0c231c694936879609beff516"}, - {file = "google_re2-1.1-2-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:60475d222cebd066c80414831c8a42aa2449aab252084102ee05440896586e6a"}, - {file = "google_re2-1.1-2-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:871cb85b9b0e1784c983b5c148156b3c5314cb29ca70432dff0d163c5c08d7e5"}, - {file = "google_re2-1.1-2-cp310-cp310-macosx_13_0_x86_64.whl", hash = 
"sha256:94f4e66e34bdb8de91ec6cdf20ba4fa9fea1dfdcfb77ff1f59700d01a0243664"}, - {file = "google_re2-1.1-2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1563577e2b720d267c4cffacc0f6a2b5c8480ea966ebdb1844fbea6602c7496f"}, - {file = "google_re2-1.1-2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:49b7964532a801b96062d78c0222d155873968f823a546a3dbe63d73f25bb56f"}, - {file = "google_re2-1.1-2-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2362fd70eb639a75fd0187d28b4ba7b20b3088833d8ad7ffd8693d0ba159e1c2"}, - {file = "google_re2-1.1-2-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:86b80719636a4e21391e20a9adf18173ee6ae2ec956726fe2ff587417b5e8ba6"}, - {file = "google_re2-1.1-2-cp310-cp310-win32.whl", hash = "sha256:5456fba09df951fe8d1714474ed1ecda102a68ddffab0113e6c117d2e64e6f2b"}, - {file = "google_re2-1.1-2-cp310-cp310-win_amd64.whl", hash = "sha256:2ac6936a3a60d8d9de9563e90227b3aea27068f597274ca192c999a12d8baa8f"}, - {file = "google_re2-1.1-2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d5a87b436028ec9b0f02fe19d4cbc19ef30441085cdfcdf1cce8fbe5c4bd5e9a"}, - {file = "google_re2-1.1-2-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:fc0d4163de9ed2155a77e7a2d59d94c348a6bbab3cff88922fab9e0d3d24faec"}, - {file = "google_re2-1.1-2-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:48b12d953bc796736e7831d67b36892fb6419a4cc44cb16521fe291e594bfe23"}, - {file = "google_re2-1.1-2-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:62c780c927cff98c1538439f0ff616f48a9b2e8837c676f53170d8ae5b9e83cb"}, - {file = "google_re2-1.1-2-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:04b2aefd768aa4edeef8b273327806c9cb0b82e90ff52eacf5d11003ac7a0db2"}, - {file = "google_re2-1.1-2-cp311-cp311-macosx_13_0_x86_64.whl", hash = "sha256:9c90175992346519ee7546d9af9a64541c05b6b70346b0ddc54a48aa0d3b6554"}, - {file = "google_re2-1.1-2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:22ad9ad9d125249d6386a2e80efb9de7af8260b703b6be7fa0ab069c1cf56ced"}, - {file = "google_re2-1.1-2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f70971f6ffe5254e476e71d449089917f50ebf9cf60f9cec80975ab1693777e2"}, - {file = "google_re2-1.1-2-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f267499529e64a4abed24c588f355ebe4700189d434d84a7367725f5a186e48d"}, - {file = "google_re2-1.1-2-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b632eff5e4cd44545a9c0e52f2e1becd55831e25f4dd4e0d7ec8ee6ca50858c1"}, - {file = "google_re2-1.1-2-cp311-cp311-win32.whl", hash = "sha256:a42c733036e8f242ee4e5f0e27153ad4ca44ced9e4ce82f3972938ddee528db0"}, - {file = "google_re2-1.1-2-cp311-cp311-win_amd64.whl", hash = "sha256:64f8eed4ca96905d99b5286b3d14b5ca4f6a025ff3c1351626a7df2f93ad1ddd"}, - {file = "google_re2-1.1-2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5541efcca5b5faf7e0d882334a04fa479bad4e7433f94870f46272eec0672c4a"}, - {file = "google_re2-1.1-2-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:92309af35b6eb2d3b3dc57045cdd83a76370958ab3e0edd2cc4638f6d23f5b32"}, - {file = "google_re2-1.1-2-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:197cd9bcaba96d18c5bf84d0c32fca7a26c234ea83b1d3083366f4392cb99f78"}, - {file = "google_re2-1.1-2-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:1b896f171d29b541256cf26e10dccc9103ac1894683914ed88828ca6facf8dca"}, - {file = "google_re2-1.1-2-cp38-cp38-macosx_13_0_arm64.whl", hash = 
"sha256:e022d3239b945014e916ca7120fee659b246ec26c301f9e0542f1a19b38a8744"}, - {file = "google_re2-1.1-2-cp38-cp38-macosx_13_0_x86_64.whl", hash = "sha256:2c73f8a9440873b68bee1198094377501065e85aaf6fcc0d2512c7589ffa06ca"}, - {file = "google_re2-1.1-2-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:901d86555bd7725506d651afaba7d71cd4abd13260aed6cfd7c641a45f76d4f6"}, - {file = "google_re2-1.1-2-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ce4710ff636701cfb56eb91c19b775d53b03749a23b7d2a5071bbbf4342a9067"}, - {file = "google_re2-1.1-2-cp38-cp38-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:76a20e5ebdf5bc5d430530197e42a2eeb562f729d3a3fb51f39168283d676e66"}, - {file = "google_re2-1.1-2-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:77c9f4d4bb1c8de9d2642d3c4b8b615858ba764df025b3b4f1310266f8def269"}, - {file = "google_re2-1.1-2-cp38-cp38-win32.whl", hash = "sha256:94bd60785bf37ef130a1613738e3c39465a67eae3f3be44bb918540d39b68da3"}, - {file = "google_re2-1.1-2-cp38-cp38-win_amd64.whl", hash = "sha256:59efeb77c0dcdbe37794c61f29c5b1f34bc06e8ec309a111ccdd29d380644d70"}, - {file = "google_re2-1.1-2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:221e38c27e1dd9ccb8e911e9c7aed6439f68ce81e7bb74001076830b0d6e931d"}, - {file = "google_re2-1.1-2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:d9145879e6c2e1b814445300b31f88a675e1f06c57564670d95a1442e8370c27"}, - {file = "google_re2-1.1-2-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:c8a12f0740e2a52826bdbf95569a4b0abdf413b4012fa71e94ad25dd4715c6e5"}, - {file = "google_re2-1.1-2-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:9c9998f71466f4db7bda752aa7c348b2881ff688e361108fe500caad1d8b9cb2"}, - {file = "google_re2-1.1-2-cp39-cp39-macosx_13_0_arm64.whl", hash = "sha256:0c39f69b702005963a3d3bf78743e1733ad73efd7e6e8465d76e3009e4694ceb"}, - {file = "google_re2-1.1-2-cp39-cp39-macosx_13_0_x86_64.whl", hash = "sha256:6d0ce762dee8d6617d0b1788a9653e805e83a23046c441d0ea65f1e27bf84114"}, - {file = "google_re2-1.1-2-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ecf3619d98c9b4a7844ab52552ad32597cdbc9a5bdbc7e3435391c653600d1e2"}, - {file = "google_re2-1.1-2-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9a1426a8cbd1fa004974574708d496005bd379310c4b1c7012be4bc75efde7a8"}, - {file = "google_re2-1.1-2-cp39-cp39-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a1a30626ba48b4070f3eab272d860ef1952e710b088792c4d68dddb155be6bfc"}, - {file = "google_re2-1.1-2-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1b9c1ffcfbc3095b6ff601ec2d2bf662988f6ea6763bc1c9d52bec55881f8fde"}, - {file = "google_re2-1.1-2-cp39-cp39-win32.whl", hash = "sha256:32ecf995a252c0548404c1065ba4b36f1e524f1f4a86b6367a1a6c3da3801e30"}, - {file = "google_re2-1.1-2-cp39-cp39-win_amd64.whl", hash = "sha256:e7865410f3b112a3609739283ec3f4f6f25aae827ff59c6bfdf806fd394d753e"}, - {file = "google_re2-1.1-3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3b21f83f0a201009c56f06fcc7294a33555ede97130e8a91b3f4cae01aed1d73"}, - {file = "google_re2-1.1-3-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:b38194b91354a38db1f86f25d09cdc6ac85d63aee4c67b43da3048ce637adf45"}, - {file = "google_re2-1.1-3-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:e7da3da8d6b5a18d6c3b61b11cc5b66b8564eaedce99d2312b15b6487730fc76"}, - {file = "google_re2-1.1-3-cp310-cp310-macosx_12_0_x86_64.whl", hash = 
"sha256:aeca656fb10d8638f245331aabab59c9e7e051ca974b366dd79e6a9efb12e401"}, - {file = "google_re2-1.1-3-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:2069d6dc94f5fa14a159bf99cad2f11e9c0f8ec3b7f44a4dde9e59afe5d1c786"}, - {file = "google_re2-1.1-3-cp310-cp310-macosx_13_0_x86_64.whl", hash = "sha256:2319a39305a4931cb5251451f2582713418a19bef2af7adf9e2a7a0edd939b99"}, - {file = "google_re2-1.1-3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:eb98fc131699756c6d86246f670a5e1c1cc1ba85413c425ad344cb30479b246c"}, - {file = "google_re2-1.1-3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a6e038986d8ffe4e269f8532f03009f229d1f6018d4ac0dabc8aff876338f6e0"}, - {file = "google_re2-1.1-3-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8618343ee658310e0f53bf586fab7409de43ce82bf8d9f7eb119536adc9783fd"}, - {file = "google_re2-1.1-3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d8140ca861cfe00602319cefe2c7b8737b379eb07fb328b51dc44584f47a2718"}, - {file = "google_re2-1.1-3-cp310-cp310-win32.whl", hash = "sha256:41f439c5c54e8a3a0a1fa2dbd1e809d3f643f862df7b16dd790f36a1238a272e"}, - {file = "google_re2-1.1-3-cp310-cp310-win_amd64.whl", hash = "sha256:fe20e97a33176d96d3e4b5b401de35182b9505823abea51425ec011f53ef5e56"}, - {file = "google_re2-1.1-3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7c39ff52b1765db039f690ee5b7b23919d8535aae94db7996079fbde0098c4d7"}, - {file = "google_re2-1.1-3-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:5420be674fd164041639ba4c825450f3d4bd635572acdde16b3dcd697f8aa3ef"}, - {file = "google_re2-1.1-3-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:ff53881cf1ce040f102a42d39db93c3f835f522337ae9c79839a842f26d97733"}, - {file = "google_re2-1.1-3-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:8d04600b0b53523118df2e413a71417c408f20dee640bf07dfab601c96a18a77"}, - {file = "google_re2-1.1-3-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:c4835d4849faa34a7fa1074098d81c420ed6c0707a3772482b02ce14f2a7c007"}, - {file = "google_re2-1.1-3-cp311-cp311-macosx_13_0_x86_64.whl", hash = "sha256:3309a9b81251d35fee15974d0ae0581a9a375266deeafdc3a3ac0d172a742357"}, - {file = "google_re2-1.1-3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:e2b51cafee7e0bc72d0a4a454547bd8f257cde412ac9f1a2dc46a203b5e42cf4"}, - {file = "google_re2-1.1-3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:83f5f1cb52f832c2297d271ee8c56cf5e9053448162e5d2223d513f729bad908"}, - {file = "google_re2-1.1-3-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:55865a1ace92be3f7953b2e2b38b901d8074a367aa491daee43260a53a7fc6f0"}, - {file = "google_re2-1.1-3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cec2167dd142e583e98c783bd0d28b8cf5a9cdbe1f7407ba4163fe3ccb613cb9"}, - {file = "google_re2-1.1-3-cp311-cp311-win32.whl", hash = "sha256:a0bc1fe96849e4eb8b726d0bba493f5b989372243b32fe20729cace02e5a214d"}, - {file = "google_re2-1.1-3-cp311-cp311-win_amd64.whl", hash = "sha256:e6310a156db96fc5957cb007dd2feb18476898654530683897469447df73a7cd"}, - {file = "google_re2-1.1-3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8e63cd10ea006088b320e8c5d308da1f6c87aa95138a71c60dd7ca1c8e91927e"}, - {file = "google_re2-1.1-3-cp312-cp312-macosx_11_0_x86_64.whl", hash = "sha256:12b566830a334178733a85e416b1e0507dbc0ceb322827616fe51ef56c5154f1"}, - {file = "google_re2-1.1-3-cp312-cp312-macosx_12_0_arm64.whl", 
hash = "sha256:442e18c9d46b225c1496919c16eafe8f8d9bb4091b00b4d3440da03c55bbf4ed"}, - {file = "google_re2-1.1-3-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:c54c00263a9c39b2dacd93e9636319af51e3cf885c080b9680a9631708326460"}, - {file = "google_re2-1.1-3-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:15a3caeeb327bc22e0c9f95eb76890fec8874cacccd2b01ff5c080ab4819bbec"}, - {file = "google_re2-1.1-3-cp312-cp312-macosx_13_0_x86_64.whl", hash = "sha256:59ec0d2cced77f715d41f6eafd901f6b15c11e28ba25fe0effdc1de554d78e75"}, - {file = "google_re2-1.1-3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:185bf0e3441aed3840590f8e42f916e2920d235eb14df2cbc2049526803d3e71"}, - {file = "google_re2-1.1-3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:586d3f2014eea5be14d8de53374d9b79fa99689160e00efa64b5fe93af326087"}, - {file = "google_re2-1.1-3-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cc2575082de4ffd234d9607f3ae67ca22b15a1a88793240e2045f3b3a36a5795"}, - {file = "google_re2-1.1-3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:59c5ad438eddb3630def394456091284d7bbc5b89351987f94f3792d296d1f96"}, - {file = "google_re2-1.1-3-cp312-cp312-win32.whl", hash = "sha256:5b9878c53f2bf16f75bf71d4ddd57f6611351408d5821040e91c53ebdf82c373"}, - {file = "google_re2-1.1-3-cp312-cp312-win_amd64.whl", hash = "sha256:4fdecfeb213110d0a85bad335a8e7cdb59fea7de81a4fe659233f487171980f9"}, - {file = "google_re2-1.1-3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2dd87bacab32b709c28d0145fe75a956b6a39e28f0726d867375dba5721c76c1"}, - {file = "google_re2-1.1-3-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:55d24c61fe35dddc1bb484593a57c9f60f9e66d7f31f091ef9608ed0b6dde79f"}, - {file = "google_re2-1.1-3-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:a0cf1180d908622df648c26b0cd09281f92129805ccc56a39227fdbfeab95cb4"}, - {file = "google_re2-1.1-3-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:09586f07f3f88d432265c75976da1c619ab7192cd7ebdf53f4ae0776c19e4b56"}, - {file = "google_re2-1.1-3-cp38-cp38-macosx_13_0_arm64.whl", hash = "sha256:539f1b053402203576e919a06749198da4ae415931ee28948a1898131ae932ce"}, - {file = "google_re2-1.1-3-cp38-cp38-macosx_13_0_x86_64.whl", hash = "sha256:abf0bcb5365b0e27a5a23f3da403dffdbbac2c0e3a3f1535a8b10cc121b5d5fb"}, - {file = "google_re2-1.1-3-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:19c83e5bbed7958213eeac3aa71c506525ce54faf03e07d0b96cd0a764890511"}, - {file = "google_re2-1.1-3-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3348e77330ff672dc44ec01894fa5d93c409a532b6d688feac55e714e9059920"}, - {file = "google_re2-1.1-3-cp38-cp38-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:06b63edb57c5ce5a13eabfd71155e346b9477dc8906dec7c580d4f70c16a7e0d"}, - {file = "google_re2-1.1-3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:12fe57ba2914092b83338d61d8def9ebd5a2bd0fd8679eceb5d4c2748105d5c0"}, - {file = "google_re2-1.1-3-cp38-cp38-win32.whl", hash = "sha256:80796e08d24e606e675019fe8de4eb5c94bb765be13c384f2695247d54a6df75"}, - {file = "google_re2-1.1-3-cp38-cp38-win_amd64.whl", hash = "sha256:3c2257dedfe7cc5deb6791e563af9e071a9d414dad89e37ac7ad22f91be171a9"}, - {file = "google_re2-1.1-3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:43a0cd77c87c894f28969ac622f94b2e6d1571261dfdd785026848a25cfdc9b9"}, - {file = "google_re2-1.1-3-cp39-cp39-macosx_11_0_x86_64.whl", hash = 
"sha256:1038990b77fd66f279bd66a0832b67435ea925e15bb59eafc7b60fdec812b616"}, - {file = "google_re2-1.1-3-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:fb5dda6875d18dd45f0f24ebced6d1f7388867c8fb04a235d1deab7ea479ce38"}, - {file = "google_re2-1.1-3-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:bb1d164965c6d57a351b421d2f77c051403766a8b75aaa602324ee2451fff77f"}, - {file = "google_re2-1.1-3-cp39-cp39-macosx_13_0_arm64.whl", hash = "sha256:a072ebfa495051d07ffecbf6ce21eb84793568d5c3c678c00ed8ff6b8066ab31"}, - {file = "google_re2-1.1-3-cp39-cp39-macosx_13_0_x86_64.whl", hash = "sha256:4eb66c8398c8a510adc97978d944b3b29c91181237218841ea1a91dc39ec0e54"}, - {file = "google_re2-1.1-3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:f7c8b57b1f559553248d1757b7fa5b2e0cc845666738d155dff1987c2618264e"}, - {file = "google_re2-1.1-3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9162f6aa4f25453c682eb176f21b8e2f40205be9f667e98a54b3e1ff10d6ee75"}, - {file = "google_re2-1.1-3-cp39-cp39-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a2d65ddf67fd7bf94705626871d463057d3d9a3538d41022f95b9d8f01df36e1"}, - {file = "google_re2-1.1-3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d140c7b9395b4d1e654127aa1c99bcc603ed01000b7bc7e28c52562f1894ec12"}, - {file = "google_re2-1.1-3-cp39-cp39-win32.whl", hash = "sha256:80c5fc200f64b2d903eeb07b8d6cefc620a872a0240c7caaa9aca05b20f5568f"}, - {file = "google_re2-1.1-3-cp39-cp39-win_amd64.whl", hash = "sha256:9eb6dbcee9b5dc4069bbc0634f2eb039ca524a14bed5868fdf6560aaafcbca06"}, - {file = "google_re2-1.1-4-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:0db114d7e1aa96dbcea452a40136d7d747d60cbb61394965774688ef59cccd4e"}, - {file = "google_re2-1.1-4-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:82133958e003a1344e5b7a791b9a9dd7560b5c8f96936dbe16f294604524a633"}, - {file = "google_re2-1.1-4-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:9e74fd441d1f3d917d3303e319f61b82cdbd96b9a5ba919377a6eef1504a1e2b"}, - {file = "google_re2-1.1-4-cp310-cp310-macosx_13_0_x86_64.whl", hash = "sha256:734a2e7a4541c57253b5ebee24f3f3366ba3658bcad01da25fb623c78723471a"}, - {file = "google_re2-1.1-4-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:d88d5eecbc908abe16132456fae13690d0508f3ac5777f320ef95cb6cab9a961"}, - {file = "google_re2-1.1-4-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:b91db80b171ecec435a07977a227757dd487356701a32f556fa6fca5d0a40522"}, - {file = "google_re2-1.1-4-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b23129887a64bb9948af14c84705273ed1a40054e99433b4acccab4dcf6a226"}, - {file = "google_re2-1.1-4-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5dc1a0cc7cd19261dcaf76763e2499305dbb7e51dc69555167cdb8af98782698"}, - {file = "google_re2-1.1-4-cp310-cp310-win32.whl", hash = "sha256:3b2ab1e2420b5dd9743a2d6bc61b64e5f708563702a75b6db86637837eaeaf2f"}, - {file = "google_re2-1.1-4-cp310-cp310-win_amd64.whl", hash = "sha256:92efca1a7ef83b6df012d432a1cbc71d10ff42200640c0f9a5ff5b343a48e633"}, - {file = "google_re2-1.1-4-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:854818fd4ce79787aca5ba459d6e5abe4ca9be2c684a5b06a7f1757452ca3708"}, - {file = "google_re2-1.1-4-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:4ceef51174b6f653b6659a8fdaa9c38960c5228b44b25be2a3bcd8566827554f"}, - {file = "google_re2-1.1-4-cp311-cp311-macosx_13_0_arm64.whl", hash = 
"sha256:ee49087c3db7e6f5238105ab5299c09e9b77516fe8cfb0a37e5f1e813d76ecb8"}, - {file = "google_re2-1.1-4-cp311-cp311-macosx_13_0_x86_64.whl", hash = "sha256:dc2312854bdc01410acc5d935f1906a49cb1f28980341c20a68797ad89d8e178"}, - {file = "google_re2-1.1-4-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:0dc0d2e42296fa84a3cb3e1bd667c6969389cd5cdf0786e6b1f911ae2d75375b"}, - {file = "google_re2-1.1-4-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:6bf04ced98453b035f84320f348f67578024f44d2997498def149054eb860ae8"}, - {file = "google_re2-1.1-4-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1d6b6ef11dc4ab322fa66c2f3561925f2b5372a879c3ed764d20e939e2fd3e5f"}, - {file = "google_re2-1.1-4-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0dcde6646fa9a97fd3692b3f6ae7daf7f3277d7500b6c253badeefa11db8956a"}, - {file = "google_re2-1.1-4-cp311-cp311-win32.whl", hash = "sha256:5f4f0229deb057348893574d5b0a96d055abebac6debf29d95b0c0e26524c9f6"}, - {file = "google_re2-1.1-4-cp311-cp311-win_amd64.whl", hash = "sha256:4713ddbe48a18875270b36a462b0eada5e84d6826f8df7edd328d8706b6f9d07"}, - {file = "google_re2-1.1-4-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:40a698300b8faddbb325662973f839489c89b960087060bd389c376828978a04"}, - {file = "google_re2-1.1-4-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:103d2d7ac92ba23911a151fd1fc7035cbf6dc92a7f6aea92270ebceb5cd5acd3"}, - {file = "google_re2-1.1-4-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:51fb7182bccab05e8258a2b6a63dda1a6b4a9e8dfb9b03ec50e50c49c2827dd4"}, - {file = "google_re2-1.1-4-cp312-cp312-macosx_13_0_x86_64.whl", hash = "sha256:65383022abd63d7b620221eba7935132b53244b8b463d8fdce498c93cf58b7b7"}, - {file = "google_re2-1.1-4-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:396281fc68a9337157b3ffcd9392c6b7fcb8aab43e5bdab496262a81d56a4ecc"}, - {file = "google_re2-1.1-4-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:8198adcfcff1c680e052044124621730fc48d08005f90a75487f5651f1ebfce2"}, - {file = "google_re2-1.1-4-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:81f7bff07c448aec4db9ca453d2126ece8710dbd9278b8bb09642045d3402a96"}, - {file = "google_re2-1.1-4-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7dacf730fd7d6ec71b11d6404b0b26e230814bfc8e9bb0d3f13bec9b5531f8d"}, - {file = "google_re2-1.1-4-cp312-cp312-win32.whl", hash = "sha256:8c764f62f4b1d89d1ef264853b6dd9fee14a89e9b86a81bc2157fe3531425eb4"}, - {file = "google_re2-1.1-4-cp312-cp312-win_amd64.whl", hash = "sha256:0be2666df4bc5381a5d693585f9bbfefb0bfd3c07530d7e403f181f5de47254a"}, - {file = "google_re2-1.1-4-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:5cb1b63a0bfd8dd65d39d2f3b2e5ae0a06ce4b2ce5818a1d1fc78a786a252673"}, - {file = "google_re2-1.1-4-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:e41751ce6b67a95230edd0772226dc94c2952a2909674cd69df9804ed0125307"}, - {file = "google_re2-1.1-4-cp38-cp38-macosx_13_0_arm64.whl", hash = "sha256:b998cfa2d50bf4c063e777c999a7e8645ec7e5d7baf43ad71b1e2e10bb0300c3"}, - {file = "google_re2-1.1-4-cp38-cp38-macosx_13_0_x86_64.whl", hash = "sha256:226ca3b0c2e970f3fc82001ac89e845ecc7a4bb7c68583e7a76cda70b61251a7"}, - {file = "google_re2-1.1-4-cp38-cp38-macosx_14_0_arm64.whl", hash = "sha256:9adec1f734ebad7c72e56c85f205a281d8fe9bf6583bc21020157d3f2812ce89"}, - {file = "google_re2-1.1-4-cp38-cp38-macosx_14_0_x86_64.whl", hash = "sha256:9c34f3c64ba566af967d29e11299560e6fdfacd8ca695120a7062b6ed993b179"}, - {file = 
"google_re2-1.1-4-cp38-cp38-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e1b85385fe293838e0d0b6e19e6c48ba8c6f739ea92ce2e23b718afe7b343363"}, - {file = "google_re2-1.1-4-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4694daa8a8987cfb568847aa872f9990e930c91a68c892ead876411d4b9012c3"}, - {file = "google_re2-1.1-4-cp38-cp38-win32.whl", hash = "sha256:5e671e9be1668187e2995aac378de574fa40df70bb6f04657af4d30a79274ce0"}, - {file = "google_re2-1.1-4-cp38-cp38-win_amd64.whl", hash = "sha256:f66c164d6049a8299f6dfcfa52d1580576b4b9724d6fcdad2f36f8f5da9304b6"}, - {file = "google_re2-1.1-4-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:25cb17ae0993a48c70596f3a3ef5d659638106401cc8193f51c0d7961b3b3eb7"}, - {file = "google_re2-1.1-4-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:5f101f86d14ca94ca4dcf63cceaa73d351f2be2481fcaa29d9e68eeab0dc2a88"}, - {file = "google_re2-1.1-4-cp39-cp39-macosx_13_0_arm64.whl", hash = "sha256:4e82591e85bf262a6d74cff152867e05fc97867c68ba81d6836ff8b0e7e62365"}, - {file = "google_re2-1.1-4-cp39-cp39-macosx_13_0_x86_64.whl", hash = "sha256:1f61c09b93ffd34b1e2557e5a9565039f935407a5786dbad46f64f1a484166e6"}, - {file = "google_re2-1.1-4-cp39-cp39-macosx_14_0_arm64.whl", hash = "sha256:12b390ad8c7e74bab068732f774e75e0680dade6469b249a721f3432f90edfc3"}, - {file = "google_re2-1.1-4-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:1284343eb31c2e82ed2d8159f33ba6842238a56782c881b07845a6d85613b055"}, - {file = "google_re2-1.1-4-cp39-cp39-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6c7b38e0daf2c06e4d3163f4c732ab3ad2521aecfed6605b69e4482c612da303"}, - {file = "google_re2-1.1-4-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1f4d4f0823e8b2f6952a145295b1ff25245ce9bb136aff6fe86452e507d4c1dd"}, - {file = "google_re2-1.1-4-cp39-cp39-win32.whl", hash = "sha256:1afae56b2a07bb48cfcfefaa15ed85bae26a68f5dc7f9e128e6e6ea36914e847"}, - {file = "google_re2-1.1-4-cp39-cp39-win_amd64.whl", hash = "sha256:aa7d6d05911ab9c8adbf3c225a7a120ab50fd2784ac48f2f0d140c0b7afc2b55"}, - {file = "google_re2-1.1-5-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:222fc2ee0e40522de0b21ad3bc90ab8983be3bf3cec3d349c80d76c8bb1a4beb"}, - {file = "google_re2-1.1-5-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:d4763b0b9195b72132a4e7de8e5a9bf1f05542f442a9115aa27cfc2a8004f581"}, - {file = "google_re2-1.1-5-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:209649da10c9d4a93d8a4d100ecbf9cc3b0252169426bec3e8b4ad7e57d600cf"}, - {file = "google_re2-1.1-5-cp310-cp310-macosx_13_0_x86_64.whl", hash = "sha256:68813aa333c1604a2df4a495b2a6ed065d7c8aebf26cc7e7abb5a6835d08353c"}, - {file = "google_re2-1.1-5-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:370a23ec775ad14e9d1e71474d56f381224dcf3e72b15d8ca7b4ad7dd9cd5853"}, - {file = "google_re2-1.1-5-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:14664a66a3ddf6bc9e56f401bf029db2d169982c53eff3f5876399104df0e9a6"}, - {file = "google_re2-1.1-5-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3ea3722cc4932cbcebd553b69dce1b4a73572823cff4e6a244f1c855da21d511"}, - {file = "google_re2-1.1-5-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e14bb264c40fd7c627ef5678e295370cd6ba95ca71d835798b6e37502fc4c690"}, - {file = "google_re2-1.1-5-cp310-cp310-win32.whl", hash = "sha256:39512cd0151ea4b3969c992579c79b423018b464624ae955be685fc07d94556c"}, - {file = "google_re2-1.1-5-cp310-cp310-win_amd64.whl", hash = 
"sha256:ac66537aa3bc5504320d922b73156909e3c2b6da19739c866502f7827b3f9fdf"}, - {file = "google_re2-1.1-5-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:5b5ea68d54890c9edb1b930dcb2658819354e5d3f2201f811798bbc0a142c2b4"}, - {file = "google_re2-1.1-5-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:33443511b6b83c35242370908efe2e8e1e7cae749c766b2b247bf30e8616066c"}, - {file = "google_re2-1.1-5-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:413d77bdd5ba0bfcada428b4c146e87707452ec50a4091ec8e8ba1413d7e0619"}, - {file = "google_re2-1.1-5-cp311-cp311-macosx_13_0_x86_64.whl", hash = "sha256:5171686e43304996a34baa2abcee6f28b169806d0e583c16d55e5656b092a414"}, - {file = "google_re2-1.1-5-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:3b284db130283771558e31a02d8eb8fb756156ab98ce80035ae2e9e3a5f307c4"}, - {file = "google_re2-1.1-5-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:296e6aed0b169648dc4b870ff47bd34c702a32600adb9926154569ef51033f47"}, - {file = "google_re2-1.1-5-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:38d50e68ead374160b1e656bbb5d101f0b95fb4cc57f4a5c12100155001480c5"}, - {file = "google_re2-1.1-5-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2a0416a35921e5041758948bcb882456916f22845f66a93bc25070ef7262b72a"}, - {file = "google_re2-1.1-5-cp311-cp311-win32.whl", hash = "sha256:a1d59568bbb5de5dd56dd6cdc79907db26cce63eb4429260300c65f43469e3e7"}, - {file = "google_re2-1.1-5-cp311-cp311-win_amd64.whl", hash = "sha256:72f5a2f179648b8358737b2b493549370debd7d389884a54d331619b285514e3"}, - {file = "google_re2-1.1-5-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:cbc72c45937b1dc5acac3560eb1720007dccca7c9879138ff874c7f6baf96005"}, - {file = "google_re2-1.1-5-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:5fadd1417fbef7235fa9453dba4eb102e6e7d94b1e4c99d5fa3dd4e288d0d2ae"}, - {file = "google_re2-1.1-5-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:040f85c63cc02696485b59b187a5ef044abe2f99b92b4fb399de40b7d2904ccc"}, - {file = "google_re2-1.1-5-cp312-cp312-macosx_13_0_x86_64.whl", hash = "sha256:64e3b975ee6d9bbb2420494e41f929c1a0de4bcc16d86619ab7a87f6ea80d6bd"}, - {file = "google_re2-1.1-5-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:8ee370413e00f4d828eaed0e83b8af84d7a72e8ee4f4bd5d3078bc741dfc430a"}, - {file = "google_re2-1.1-5-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:5b89383001079323f693ba592d7aad789d7a02e75adb5d3368d92b300f5963fd"}, - {file = "google_re2-1.1-5-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:63cb4fdfbbda16ae31b41a6388ea621510db82feb8217a74bf36552ecfcd50ad"}, - {file = "google_re2-1.1-5-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9ebedd84ae8be10b7a71a16162376fd67a2386fe6361ef88c622dcf7fd679daf"}, - {file = "google_re2-1.1-5-cp312-cp312-win32.whl", hash = "sha256:c8e22d1692bc2c81173330c721aff53e47ffd3c4403ff0cd9d91adfd255dd150"}, - {file = "google_re2-1.1-5-cp312-cp312-win_amd64.whl", hash = "sha256:5197a6af438bb8c4abda0bbe9c4fbd6c27c159855b211098b29d51b73e4cbcf6"}, - {file = "google_re2-1.1-5-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:b6727e0b98417e114b92688ad2aa256102ece51f29b743db3d831df53faf1ce3"}, - {file = "google_re2-1.1-5-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:711e2b6417eb579c61a4951029d844f6b95b9b373b213232efd413659889a363"}, - {file = "google_re2-1.1-5-cp38-cp38-macosx_13_0_arm64.whl", hash = "sha256:71ae8b3df22c5c154c8af0f0e99d234a450ef1644393bc2d7f53fc8c0a1e111c"}, - {file = 
"google_re2-1.1-5-cp38-cp38-macosx_13_0_x86_64.whl", hash = "sha256:94a04e214bc521a3807c217d50cf099bbdd0c0a80d2d996c0741dbb995b5f49f"}, - {file = "google_re2-1.1-5-cp38-cp38-macosx_14_0_arm64.whl", hash = "sha256:a770f75358508a9110c81a1257721f70c15d9bb592a2fb5c25ecbd13566e52a5"}, - {file = "google_re2-1.1-5-cp38-cp38-macosx_14_0_x86_64.whl", hash = "sha256:07c9133357f7e0b17c6694d5dcb82e0371f695d7c25faef2ff8117ef375343ff"}, - {file = "google_re2-1.1-5-cp38-cp38-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:204ca6b1cf2021548f4a9c29ac015e0a4ab0a7b6582bf2183d838132b60c8fda"}, - {file = "google_re2-1.1-5-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f0b95857c2c654f419ca684ec38c9c3325c24e6ba7d11910a5110775a557bb18"}, - {file = "google_re2-1.1-5-cp38-cp38-win32.whl", hash = "sha256:347ac770e091a0364e822220f8d26ab53e6fdcdeaec635052000845c5a3fb869"}, - {file = "google_re2-1.1-5-cp38-cp38-win_amd64.whl", hash = "sha256:ec32bb6de7ffb112a07d210cf9f797b7600645c2d5910703fa07f456dd2150e0"}, - {file = "google_re2-1.1-5-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:eb5adf89060f81c5ff26c28e261e6b4997530a923a6093c9726b8dec02a9a326"}, - {file = "google_re2-1.1-5-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:a22630c9dd9ceb41ca4316bccba2643a8b1d5c198f21c00ed5b50a94313aaf10"}, - {file = "google_re2-1.1-5-cp39-cp39-macosx_13_0_arm64.whl", hash = "sha256:544dc17fcc2d43ec05f317366375796351dec44058e1164e03c3f7d050284d58"}, - {file = "google_re2-1.1-5-cp39-cp39-macosx_13_0_x86_64.whl", hash = "sha256:19710af5ea88751c7768575b23765ce0dfef7324d2539de576f75cdc319d6654"}, - {file = "google_re2-1.1-5-cp39-cp39-macosx_14_0_arm64.whl", hash = "sha256:f82995a205e08ad896f4bd5ce4847c834fab877e1772a44e5f262a647d8a1dec"}, - {file = "google_re2-1.1-5-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:63533c4d58da9dc4bc040250f1f52b089911699f0368e0e6e15f996387a984ed"}, - {file = "google_re2-1.1-5-cp39-cp39-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:79e00fcf0cb04ea35a22b9014712d448725ce4ddc9f08cc818322566176ca4b0"}, - {file = "google_re2-1.1-5-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bc41afcefee2da6c4ed883a93d7f527c4b960cd1d26bbb0020a7b8c2d341a60a"}, - {file = "google_re2-1.1-5-cp39-cp39-win32.whl", hash = "sha256:486730b5e1f1c31b0abc6d80abe174ce4f1188fe17d1b50698f2bf79dc6e44be"}, - {file = "google_re2-1.1-5-cp39-cp39-win_amd64.whl", hash = "sha256:4de637ca328f1d23209e80967d1b987d6b352cd01b3a52a84b4d742c69c3da6c"}, ] [[package]] @@ -4431,13 +4435,10 @@ files = [ {file = "lxml-4.9.3-cp27-cp27m-macosx_11_0_x86_64.whl", hash = "sha256:b0a545b46b526d418eb91754565ba5b63b1c0b12f9bd2f808c852d9b4b2f9b5c"}, {file = "lxml-4.9.3-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:075b731ddd9e7f68ad24c635374211376aa05a281673ede86cbe1d1b3455279d"}, {file = "lxml-4.9.3-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1e224d5755dba2f4a9498e150c43792392ac9b5380aa1b845f98a1618c94eeef"}, - {file = "lxml-4.9.3-cp27-cp27m-win32.whl", hash = "sha256:2c74524e179f2ad6d2a4f7caf70e2d96639c0954c943ad601a9e146c76408ed7"}, - {file = "lxml-4.9.3-cp27-cp27m-win_amd64.whl", hash = "sha256:4f1026bc732b6a7f96369f7bfe1a4f2290fb34dce00d8644bc3036fb351a4ca1"}, {file = "lxml-4.9.3-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c0781a98ff5e6586926293e59480b64ddd46282953203c76ae15dbbbf302e8bb"}, {file = "lxml-4.9.3-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = 
"sha256:cef2502e7e8a96fe5ad686d60b49e1ab03e438bd9123987994528febd569868e"}, {file = "lxml-4.9.3-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:b86164d2cff4d3aaa1f04a14685cbc072efd0b4f99ca5708b2ad1b9b5988a991"}, {file = "lxml-4.9.3-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:42871176e7896d5d45138f6d28751053c711ed4d48d8e30b498da155af39aebd"}, - {file = "lxml-4.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:ae8b9c6deb1e634ba4f1930eb67ef6e6bf6a44b6eb5ad605642b2d6d5ed9ce3c"}, {file = "lxml-4.9.3-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:411007c0d88188d9f621b11d252cce90c4a2d1a49db6c068e3c16422f306eab8"}, {file = "lxml-4.9.3-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:cd47b4a0d41d2afa3e58e5bf1f62069255aa2fd6ff5ee41604418ca925911d76"}, {file = "lxml-4.9.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0e2cb47860da1f7e9a5256254b74ae331687b9672dfa780eed355c4c9c3dbd23"}, @@ -4446,7 +4447,6 @@ files = [ {file = "lxml-4.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:97047f0d25cd4bcae81f9ec9dc290ca3e15927c192df17331b53bebe0e3ff96d"}, {file = "lxml-4.9.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:1f447ea5429b54f9582d4b955f5f1985f278ce5cf169f72eea8afd9502973dd5"}, {file = "lxml-4.9.3-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:57d6ba0ca2b0c462f339640d22882acc711de224d769edf29962b09f77129cbf"}, - {file = "lxml-4.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:9767e79108424fb6c3edf8f81e6730666a50feb01a328f4a016464a5893f835a"}, {file = "lxml-4.9.3-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:71c52db65e4b56b8ddc5bb89fb2e66c558ed9d1a74a45ceb7dcb20c191c3df2f"}, {file = "lxml-4.9.3-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d73d8ecf8ecf10a3bd007f2192725a34bd62898e8da27eb9d32a58084f93962b"}, {file = "lxml-4.9.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0a3d3487f07c1d7f150894c238299934a2a074ef590b583103a45002035be120"}, @@ -4466,7 +4466,6 @@ files = [ {file = "lxml-4.9.3-cp36-cp36m-macosx_11_0_x86_64.whl", hash = "sha256:64f479d719dc9f4c813ad9bb6b28f8390360660b73b2e4beb4cb0ae7104f1c12"}, {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:dd708cf4ee4408cf46a48b108fb9427bfa00b9b85812a9262b5c668af2533ea5"}, {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c31c7462abdf8f2ac0577d9f05279727e698f97ecbb02f17939ea99ae8daa98"}, - {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e3cd95e10c2610c360154afdc2f1480aea394f4a4f1ea0a5eacce49640c9b190"}, {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:4930be26af26ac545c3dffb662521d4e6268352866956672231887d18f0eaab2"}, {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4aec80cde9197340bc353d2768e2a75f5f60bacda2bab72ab1dc499589b3878c"}, {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:14e019fd83b831b2e61baed40cab76222139926b1fb5ed0e79225bc0cae14584"}, @@ -4476,7 +4475,6 @@ files = [ {file = "lxml-4.9.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bef4e656f7d98aaa3486d2627e7d2df1157d7e88e7efd43a65aa5dd4714916cf"}, {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = 
"sha256:46f409a2d60f634fe550f7133ed30ad5321ae2e6630f13657fb9479506b00601"}, {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:4c28a9144688aef80d6ea666c809b4b0e50010a2aca784c97f5e6bf143d9f129"}, - {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:141f1d1a9b663c679dc524af3ea1773e618907e96075262726c7612c02b149a4"}, {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:53ace1c1fd5a74ef662f844a0413446c0629d151055340e9893da958a374f70d"}, {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:17a753023436a18e27dd7769e798ce302963c236bc4114ceee5b25c18c52c693"}, {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7d298a1bd60c067ea75d9f684f5f3992c9d6766fadbc0bcedd39750bf344c2f4"}, @@ -4486,7 +4484,6 @@ files = [ {file = "lxml-4.9.3-cp37-cp37m-win_amd64.whl", hash = "sha256:120fa9349a24c7043854c53cae8cec227e1f79195a7493e09e0c12e29f918e52"}, {file = "lxml-4.9.3-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4d2d1edbca80b510443f51afd8496be95529db04a509bc8faee49c7b0fb6d2cc"}, {file = "lxml-4.9.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8d7e43bd40f65f7d97ad8ef5c9b1778943d02f04febef12def25f7583d19baac"}, - {file = "lxml-4.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:71d66ee82e7417828af6ecd7db817913cb0cf9d4e61aa0ac1fde0583d84358db"}, {file = "lxml-4.9.3-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:6fc3c450eaa0b56f815c7b62f2b7fba7266c4779adcf1cece9e6deb1de7305ce"}, {file = "lxml-4.9.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:65299ea57d82fb91c7f019300d24050c4ddeb7c5a190e076b5f48a2b43d19c42"}, {file = "lxml-4.9.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:eadfbbbfb41b44034a4c757fd5d70baccd43296fb894dba0295606a7cf3124aa"}, @@ -4496,7 +4493,6 @@ files = [ {file = "lxml-4.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:92af161ecbdb2883c4593d5ed4815ea71b31fafd7fd05789b23100d081ecac96"}, {file = "lxml-4.9.3-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:9bb6ad405121241e99a86efff22d3ef469024ce22875a7ae045896ad23ba2340"}, {file = "lxml-4.9.3-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8ed74706b26ad100433da4b9d807eae371efaa266ffc3e9191ea436087a9d6a7"}, - {file = "lxml-4.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fbf521479bcac1e25a663df882c46a641a9bff6b56dc8b0fafaebd2f66fb231b"}, {file = "lxml-4.9.3-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:303bf1edce6ced16bf67a18a1cf8339d0db79577eec5d9a6d4a80f0fb10aa2da"}, {file = "lxml-4.9.3-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:5515edd2a6d1a5a70bfcdee23b42ec33425e405c5b351478ab7dc9347228f96e"}, {file = "lxml-4.9.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:690dafd0b187ed38583a648076865d8c229661ed20e48f2335d68e2cf7dc829d"}, @@ -4507,16 +4503,13 @@ files = [ {file = "lxml-4.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:4dd9a263e845a72eacb60d12401e37c616438ea2e5442885f65082c276dfb2b2"}, {file = "lxml-4.9.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6689a3d7fd13dc687e9102a27e98ef33730ac4fe37795d5036d18b4d527abd35"}, {file = 
"lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:f6bdac493b949141b733c5345b6ba8f87a226029cbabc7e9e121a413e49441e0"}, - {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:05186a0f1346ae12553d66df1cfce6f251589fea3ad3da4f3ef4e34b2d58c6a3"}, {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c2006f5c8d28dee289f7020f721354362fa304acbaaf9745751ac4006650254b"}, {file = "lxml-4.9.3-pp38-pypy38_pp73-macosx_11_0_x86_64.whl", hash = "sha256:5c245b783db29c4e4fbbbfc9c5a78be496c9fea25517f90606aa1f6b2b3d5f7b"}, {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4fb960a632a49f2f089d522f70496640fdf1218f1243889da3822e0a9f5f3ba7"}, - {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:50670615eaf97227d5dc60de2dc99fb134a7130d310d783314e7724bf163f75d"}, {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9719fe17307a9e814580af1f5c6e05ca593b12fb7e44fe62450a5384dbf61b4b"}, {file = "lxml-4.9.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:3331bece23c9ee066e0fb3f96c61322b9e0f54d775fccefff4c38ca488de283a"}, {file = "lxml-4.9.3-pp39-pypy39_pp73-macosx_11_0_x86_64.whl", hash = "sha256:ed667f49b11360951e201453fc3967344d0d0263aa415e1619e85ae7fd17b4e0"}, {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8b77946fd508cbf0fccd8e400a7f71d4ac0e1595812e66025bac475a8e811694"}, - {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e4da8ca0c0c0aea88fd46be8e44bd49716772358d648cce45fe387f7b92374a7"}, {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fe4bda6bd4340caa6e5cf95e73f8fea5c4bfc55763dd42f1b50a94c1b4a2fbd4"}, {file = "lxml-4.9.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:f3df3db1d336b9356dd3112eae5f5c2b8b377f3bc826848567f10bfddfee77e9"}, {file = "lxml-4.9.3.tar.gz", hash = "sha256:48628bd53a426c9eb9bc066a923acaa0878d1e86129fd5359aee99285f4eed9c"}, @@ -4677,16 +4670,6 @@ files = [ {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, - {file = 
"MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, @@ -6653,7 +6636,6 @@ files = [ {file = "pymongo-4.6.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ab6bcc8e424e07c1d4ba6df96f7fb963bcb48f590b9456de9ebd03b88084fe8"}, {file = "pymongo-4.6.0-cp312-cp312-win32.whl", hash = "sha256:47aa128be2e66abd9d1a9b0437c62499d812d291f17b55185cb4aa33a5f710a4"}, {file = "pymongo-4.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:014e7049dd019a6663747ca7dae328943e14f7261f7c1381045dfc26a04fa330"}, - {file = "pymongo-4.6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e24025625bad66895b1bc3ae1647f48f0a92dd014108fb1be404c77f0b69ca67"}, {file = "pymongo-4.6.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:288c21ab9531b037f7efa4e467b33176bc73a0c27223c141b822ab4a0e66ff2a"}, {file = "pymongo-4.6.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:747c84f4e690fbe6999c90ac97246c95d31460d890510e4a3fa61b7d2b87aa34"}, {file = "pymongo-4.6.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:055f5c266e2767a88bb585d01137d9c7f778b0195d3dbf4a487ef0638be9b651"}, @@ -7094,7 +7076,6 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -7102,15 +7083,8 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = 
"PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -7127,7 +7101,6 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -7135,7 +7108,6 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = 
"PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -8023,7 +7995,6 @@ files = [ {file = "SQLAlchemy-1.4.49-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:03db81b89fe7ef3857b4a00b63dedd632d6183d4ea5a31c5d8a92e000a41fc71"}, {file = "SQLAlchemy-1.4.49-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:95b9df9afd680b7a3b13b38adf6e3a38995da5e162cc7524ef08e3be4e5ed3e1"}, {file = "SQLAlchemy-1.4.49-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a63e43bf3f668c11bb0444ce6e809c1227b8f067ca1068898f3008a273f52b09"}, - {file = "SQLAlchemy-1.4.49-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca46de16650d143a928d10842939dab208e8d8c3a9a8757600cae9b7c579c5cd"}, {file = "SQLAlchemy-1.4.49-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f835c050ebaa4e48b18403bed2c0fda986525896efd76c245bdd4db995e51a4c"}, {file = "SQLAlchemy-1.4.49-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c21b172dfb22e0db303ff6419451f0cac891d2e911bb9fbf8003d717f1bcf91"}, {file = "SQLAlchemy-1.4.49-cp310-cp310-win32.whl", hash = "sha256:5fb1ebdfc8373b5a291485757bd6431de8d7ed42c27439f543c81f6c8febd729"}, @@ -8033,35 +8004,26 @@ files = [ {file = "SQLAlchemy-1.4.49-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5debe7d49b8acf1f3035317e63d9ec8d5e4d904c6e75a2a9246a119f5f2fdf3d"}, {file = "SQLAlchemy-1.4.49-cp311-cp311-win32.whl", hash = "sha256:82b08e82da3756765c2e75f327b9bf6b0f043c9c3925fb95fb51e1567fa4ee87"}, {file = "SQLAlchemy-1.4.49-cp311-cp311-win_amd64.whl", hash = "sha256:171e04eeb5d1c0d96a544caf982621a1711d078dbc5c96f11d6469169bd003f1"}, - {file = "SQLAlchemy-1.4.49-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f23755c384c2969ca2f7667a83f7c5648fcf8b62a3f2bbd883d805454964a800"}, - {file = "SQLAlchemy-1.4.49-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8396e896e08e37032e87e7fbf4a15f431aa878c286dc7f79e616c2feacdb366c"}, - {file = "SQLAlchemy-1.4.49-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66da9627cfcc43bbdebd47bfe0145bb662041472393c03b7802253993b6b7c90"}, - {file = "SQLAlchemy-1.4.49-cp312-cp312-win32.whl", hash = "sha256:9a06e046ffeb8a484279e54bda0a5abfd9675f594a2e38ef3133d7e4d75b6214"}, - {file = "SQLAlchemy-1.4.49-cp312-cp312-win_amd64.whl", hash = "sha256:7cf8b90ad84ad3a45098b1c9f56f2b161601e4670827d6b892ea0e884569bd1d"}, {file = "SQLAlchemy-1.4.49-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:36e58f8c4fe43984384e3fbe6341ac99b6b4e083de2fe838f0fdb91cebe9e9cb"}, {file = 
"SQLAlchemy-1.4.49-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b31e67ff419013f99ad6f8fc73ee19ea31585e1e9fe773744c0f3ce58c039c30"}, - {file = "SQLAlchemy-1.4.49-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ebc22807a7e161c0d8f3da34018ab7c97ef6223578fcdd99b1d3e7ed1100a5db"}, {file = "SQLAlchemy-1.4.49-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c14b29d9e1529f99efd550cd04dbb6db6ba5d690abb96d52de2bff4ed518bc95"}, {file = "SQLAlchemy-1.4.49-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c40f3470e084d31247aea228aa1c39bbc0904c2b9ccbf5d3cfa2ea2dac06f26d"}, {file = "SQLAlchemy-1.4.49-cp36-cp36m-win32.whl", hash = "sha256:706bfa02157b97c136547c406f263e4c6274a7b061b3eb9742915dd774bbc264"}, {file = "SQLAlchemy-1.4.49-cp36-cp36m-win_amd64.whl", hash = "sha256:a7f7b5c07ae5c0cfd24c2db86071fb2a3d947da7bd487e359cc91e67ac1c6d2e"}, {file = "SQLAlchemy-1.4.49-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:4afbbf5ef41ac18e02c8dc1f86c04b22b7a2125f2a030e25bbb4aff31abb224b"}, {file = "SQLAlchemy-1.4.49-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:24e300c0c2147484a002b175f4e1361f102e82c345bf263242f0449672a4bccf"}, - {file = "SQLAlchemy-1.4.49-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:393cd06c3b00b57f5421e2133e088df9cabcececcea180327e43b937b5a7caa5"}, {file = "SQLAlchemy-1.4.49-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:201de072b818f8ad55c80d18d1a788729cccf9be6d9dc3b9d8613b053cd4836d"}, {file = "SQLAlchemy-1.4.49-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7653ed6817c710d0c95558232aba799307d14ae084cc9b1f4c389157ec50df5c"}, {file = "SQLAlchemy-1.4.49-cp37-cp37m-win32.whl", hash = "sha256:647e0b309cb4512b1f1b78471fdaf72921b6fa6e750b9f891e09c6e2f0e5326f"}, {file = "SQLAlchemy-1.4.49-cp37-cp37m-win_amd64.whl", hash = "sha256:ab73ed1a05ff539afc4a7f8cf371764cdf79768ecb7d2ec691e3ff89abbc541e"}, {file = "SQLAlchemy-1.4.49-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:37ce517c011560d68f1ffb28af65d7e06f873f191eb3a73af5671e9c3fada08a"}, {file = "SQLAlchemy-1.4.49-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1878ce508edea4a879015ab5215546c444233881301e97ca16fe251e89f1c55"}, - {file = "SQLAlchemy-1.4.49-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95ab792ca493891d7a45a077e35b418f68435efb3e1706cb8155e20e86a9013c"}, {file = "SQLAlchemy-1.4.49-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0e8e608983e6f85d0852ca61f97e521b62e67969e6e640fe6c6b575d4db68557"}, {file = "SQLAlchemy-1.4.49-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ccf956da45290df6e809ea12c54c02ace7f8ff4d765d6d3dfb3655ee876ce58d"}, {file = "SQLAlchemy-1.4.49-cp38-cp38-win32.whl", hash = "sha256:f167c8175ab908ce48bd6550679cc6ea20ae169379e73c7720a28f89e53aa532"}, {file = "SQLAlchemy-1.4.49-cp38-cp38-win_amd64.whl", hash = 
"sha256:45806315aae81a0c202752558f0df52b42d11dd7ba0097bf71e253b4215f34f4"}, {file = "SQLAlchemy-1.4.49-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:b6d0c4b15d65087738a6e22e0ff461b407533ff65a73b818089efc8eb2b3e1de"}, {file = "SQLAlchemy-1.4.49-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a843e34abfd4c797018fd8d00ffffa99fd5184c421f190b6ca99def4087689bd"}, - {file = "SQLAlchemy-1.4.49-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:738d7321212941ab19ba2acf02a68b8ee64987b248ffa2101630e8fccb549e0d"}, {file = "SQLAlchemy-1.4.49-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1c890421651b45a681181301b3497e4d57c0d01dc001e10438a40e9a9c25ee77"}, {file = "SQLAlchemy-1.4.49-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d26f280b8f0a8f497bc10573849ad6dc62e671d2468826e5c748d04ed9e670d5"}, {file = "SQLAlchemy-1.4.49-cp39-cp39-win32.whl", hash = "sha256:ec2268de67f73b43320383947e74700e95c6770d0c68c4e615e9897e46296294"}, @@ -8644,6 +8606,24 @@ files = [ {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, ] +[[package]] +name = "tzlocal" +version = "5.2" +description = "tzinfo object for the local timezone" +optional = true +python-versions = ">=3.8" +files = [ + {file = "tzlocal-5.2-py3-none-any.whl", hash = "sha256:49816ef2fe65ea8ac19d19aa7a1ae0551c834303d5014c6d5a62e4cbda8047b8"}, + {file = "tzlocal-5.2.tar.gz", hash = "sha256:8d399205578f1a9342816409cc1e46a93ebd5755e39ea2d85334bea911bf0e6e"}, +] + +[package.dependencies] +"backports.zoneinfo" = {version = "*", markers = "python_version < \"3.9\""} +tzdata = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +devenv = ["check-manifest", "pytest (>=4.3)", "pytest-cov", "pytest-mock (>=3.3)", "zest.releaser"] + [[package]] name = "uc-micro-py" version = "1.0.2" @@ -9064,11 +9044,73 @@ files = [ docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] +[[package]] +name = "zstandard" +version = "0.22.0" +description = "Zstandard bindings for Python" +optional = true +python-versions = ">=3.8" +files = [ + {file = "zstandard-0.22.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:275df437ab03f8c033b8a2c181e51716c32d831082d93ce48002a5227ec93019"}, + {file = "zstandard-0.22.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2ac9957bc6d2403c4772c890916bf181b2653640da98f32e04b96e4d6fb3252a"}, + {file = "zstandard-0.22.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe3390c538f12437b859d815040763abc728955a52ca6ff9c5d4ac707c4ad98e"}, + {file = "zstandard-0.22.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1958100b8a1cc3f27fa21071a55cb2ed32e9e5df4c3c6e661c193437f171cba2"}, + {file = "zstandard-0.22.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93e1856c8313bc688d5df069e106a4bc962eef3d13372020cc6e3ebf5e045202"}, + {file = 
"zstandard-0.22.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:1a90ba9a4c9c884bb876a14be2b1d216609385efb180393df40e5172e7ecf356"}, + {file = "zstandard-0.22.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3db41c5e49ef73641d5111554e1d1d3af106410a6c1fb52cf68912ba7a343a0d"}, + {file = "zstandard-0.22.0-cp310-cp310-win32.whl", hash = "sha256:d8593f8464fb64d58e8cb0b905b272d40184eac9a18d83cf8c10749c3eafcd7e"}, + {file = "zstandard-0.22.0-cp310-cp310-win_amd64.whl", hash = "sha256:f1a4b358947a65b94e2501ce3e078bbc929b039ede4679ddb0460829b12f7375"}, + {file = "zstandard-0.22.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:589402548251056878d2e7c8859286eb91bd841af117dbe4ab000e6450987e08"}, + {file = "zstandard-0.22.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a97079b955b00b732c6f280d5023e0eefe359045e8b83b08cf0333af9ec78f26"}, + {file = "zstandard-0.22.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:445b47bc32de69d990ad0f34da0e20f535914623d1e506e74d6bc5c9dc40bb09"}, + {file = "zstandard-0.22.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33591d59f4956c9812f8063eff2e2c0065bc02050837f152574069f5f9f17775"}, + {file = "zstandard-0.22.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:888196c9c8893a1e8ff5e89b8f894e7f4f0e64a5af4d8f3c410f0319128bb2f8"}, + {file = "zstandard-0.22.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:53866a9d8ab363271c9e80c7c2e9441814961d47f88c9bc3b248142c32141d94"}, + {file = "zstandard-0.22.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4ac59d5d6910b220141c1737b79d4a5aa9e57466e7469a012ed42ce2d3995e88"}, + {file = "zstandard-0.22.0-cp311-cp311-win32.whl", hash = "sha256:2b11ea433db22e720758cba584c9d661077121fcf60ab43351950ded20283440"}, + {file = "zstandard-0.22.0-cp311-cp311-win_amd64.whl", hash = "sha256:11f0d1aab9516a497137b41e3d3ed4bbf7b2ee2abc79e5c8b010ad286d7464bd"}, + {file = "zstandard-0.22.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6c25b8eb733d4e741246151d895dd0308137532737f337411160ff69ca24f93a"}, + {file = "zstandard-0.22.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f9b2cde1cd1b2a10246dbc143ba49d942d14fb3d2b4bccf4618d475c65464912"}, + {file = "zstandard-0.22.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a88b7df61a292603e7cd662d92565d915796b094ffb3d206579aaebac6b85d5f"}, + {file = "zstandard-0.22.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:466e6ad8caefb589ed281c076deb6f0cd330e8bc13c5035854ffb9c2014b118c"}, + {file = "zstandard-0.22.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a1d67d0d53d2a138f9e29d8acdabe11310c185e36f0a848efa104d4e40b808e4"}, + {file = "zstandard-0.22.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:39b2853efc9403927f9065cc48c9980649462acbdf81cd4f0cb773af2fd734bc"}, + {file = "zstandard-0.22.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8a1b2effa96a5f019e72874969394edd393e2fbd6414a8208fea363a22803b45"}, + {file = "zstandard-0.22.0-cp312-cp312-win32.whl", hash = "sha256:88c5b4b47a8a138338a07fc94e2ba3b1535f69247670abfe422de4e0b344aae2"}, + {file = "zstandard-0.22.0-cp312-cp312-win_amd64.whl", hash = "sha256:de20a212ef3d00d609d0b22eb7cc798d5a69035e81839f549b538eff4105d01c"}, + {file = "zstandard-0.22.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d75f693bb4e92c335e0645e8845e553cd09dc91616412d1d4650da835b5449df"}, + 
{file = "zstandard-0.22.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:36a47636c3de227cd765e25a21dc5dace00539b82ddd99ee36abae38178eff9e"}, + {file = "zstandard-0.22.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68953dc84b244b053c0d5f137a21ae8287ecf51b20872eccf8eaac0302d3e3b0"}, + {file = "zstandard-0.22.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2612e9bb4977381184bb2463150336d0f7e014d6bb5d4a370f9a372d21916f69"}, + {file = "zstandard-0.22.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:23d2b3c2b8e7e5a6cb7922f7c27d73a9a615f0a5ab5d0e03dd533c477de23004"}, + {file = "zstandard-0.22.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1d43501f5f31e22baf822720d82b5547f8a08f5386a883b32584a185675c8fbf"}, + {file = "zstandard-0.22.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a493d470183ee620a3df1e6e55b3e4de8143c0ba1b16f3ded83208ea8ddfd91d"}, + {file = "zstandard-0.22.0-cp38-cp38-win32.whl", hash = "sha256:7034d381789f45576ec3f1fa0e15d741828146439228dc3f7c59856c5bcd3292"}, + {file = "zstandard-0.22.0-cp38-cp38-win_amd64.whl", hash = "sha256:d8fff0f0c1d8bc5d866762ae95bd99d53282337af1be9dc0d88506b340e74b73"}, + {file = "zstandard-0.22.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2fdd53b806786bd6112d97c1f1e7841e5e4daa06810ab4b284026a1a0e484c0b"}, + {file = "zstandard-0.22.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:73a1d6bd01961e9fd447162e137ed949c01bdb830dfca487c4a14e9742dccc93"}, + {file = "zstandard-0.22.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9501f36fac6b875c124243a379267d879262480bf85b1dbda61f5ad4d01b75a3"}, + {file = "zstandard-0.22.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48f260e4c7294ef275744210a4010f116048e0c95857befb7462e033f09442fe"}, + {file = "zstandard-0.22.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:959665072bd60f45c5b6b5d711f15bdefc9849dd5da9fb6c873e35f5d34d8cfb"}, + {file = "zstandard-0.22.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d22fdef58976457c65e2796e6730a3ea4a254f3ba83777ecfc8592ff8d77d303"}, + {file = "zstandard-0.22.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a7ccf5825fd71d4542c8ab28d4d482aace885f5ebe4b40faaa290eed8e095a4c"}, + {file = "zstandard-0.22.0-cp39-cp39-win32.whl", hash = "sha256:f058a77ef0ece4e210bb0450e68408d4223f728b109764676e1a13537d056bb0"}, + {file = "zstandard-0.22.0-cp39-cp39-win_amd64.whl", hash = "sha256:e9e9d4e2e336c529d4c435baad846a181e39a982f823f7e4495ec0b0ec8538d2"}, + {file = "zstandard-0.22.0.tar.gz", hash = "sha256:8226a33c542bcb54cd6bd0a366067b610b41713b64c9abec1bc4533d69f51e70"}, +] + +[package.dependencies] +cffi = {version = ">=1.11", markers = "platform_python_implementation == \"PyPy\""} + +[package.extras] +cffi = ["cffi (>=1.11)"] + [extras] athena = ["botocore", "pyarrow", "pyathena", "s3fs"] az = ["adlfs"] bigquery = ["gcsfs", "google-cloud-bigquery", "grpcio", "pyarrow"] cli = ["cron-descriptor", "pipdeptree"] +clickhouse = ["adlfs", "clickhouse-connect", "clickhouse-driver", "gcsfs", "pyarrow", "s3fs"] databricks = ["databricks-sql-connector"] dbt = ["dbt-athena-community", "dbt-bigquery", "dbt-core", "dbt-databricks", "dbt-duckdb", "dbt-redshift", "dbt-snowflake"] dremio = ["pyarrow"] @@ -9090,4 +9132,4 @@ weaviate = ["weaviate-client"] [metadata] lock-version = "2.0" python-versions = ">=3.8.1,<3.13" -content-hash = 
"0bd3559c3b2e0ad8a33bfdb81586f1db8399d862728e8899b259961c8e175abf" +content-hash = "8f76bfb3e8eb515bfa0037e987347b3001b933fed22efc76969a3604ac139352" From 9bda629d82fdde8e30950c15901a33525f5e9446 Mon Sep 17 00:00:00 2001 From: Dave Date: Thu, 18 Apr 2024 16:25:55 +0200 Subject: [PATCH 096/127] fix merging fix timestamp allow datetime to be parse as time --- dlt/common/time.py | 3 ++- dlt/destinations/impl/bigquery/bigquery.py | 1 - .../impl/clickhouse/clickhouse.py | 19 +++++++++++-------- tests/load/pipeline/test_stage_loading.py | 2 +- 4 files changed, 14 insertions(+), 11 deletions(-) diff --git a/dlt/common/time.py b/dlt/common/time.py index b7be589b67..47a92581f4 100644 --- a/dlt/common/time.py +++ b/dlt/common/time.py @@ -133,7 +133,6 @@ def ensure_pendulum_time(value: Union[str, datetime.time]) -> pendulum.Time: Returns: A pendulum.Time object """ - if isinstance(value, datetime.time): if isinstance(value, pendulum.Time): return value @@ -142,6 +141,8 @@ def ensure_pendulum_time(value: Union[str, datetime.time]) -> pendulum.Time: result = parse_iso_like_datetime(value) if isinstance(result, pendulum.Time): return result + elif isinstance(result, pendulum.DateTime): + return result.time() else: raise ValueError(f"{value} is not a valid ISO time string.") raise TypeError(f"Cannot coerce {value} to a pendulum.Time object.") diff --git a/dlt/destinations/impl/bigquery/bigquery.py b/dlt/destinations/impl/bigquery/bigquery.py index 0ac042a056..6c9456d723 100644 --- a/dlt/destinations/impl/bigquery/bigquery.py +++ b/dlt/destinations/impl/bigquery/bigquery.py @@ -83,7 +83,6 @@ class BigQueryTypeMapper(TypeMapper): "NUMERIC": "decimal", "BIGNUMERIC": "decimal", "JSON": "complex", - "TIME": "time", } def to_db_decimal_type(self, precision: Optional[int], scale: Optional[int]) -> str: diff --git a/dlt/destinations/impl/clickhouse/clickhouse.py b/dlt/destinations/impl/clickhouse/clickhouse.py index 9c1bc5936f..86ec6c730d 100644 --- a/dlt/destinations/impl/clickhouse/clickhouse.py +++ b/dlt/destinations/impl/clickhouse/clickhouse.py @@ -77,7 +77,7 @@ class ClickHouseTypeMapper(TypeMapper): "double": "Float64", "bool": "Boolean", "date": "Date", - "timestamp": "DateTime('UTC')", + "timestamp": "DateTime64(6,'UTC')", "time": "String", "bigint": "Int64", "binary": "String", @@ -103,9 +103,6 @@ class ClickHouseTypeMapper(TypeMapper): "Decimal": "decimal", } - def to_db_time_type(self, precision: Optional[int], table_format: TTableFormat = None) -> str: - return "DateTime" - def from_db_type( self, db_type: str, precision: Optional[int] = None, scale: Optional[int] = None ) -> TColumnType: @@ -295,10 +292,16 @@ def gen_key_table_clauses( key_clauses: Sequence[str], for_delete: bool, ) -> List[str]: - join_conditions = " AND ".join([c.format(d="d", s="s") for c in key_clauses]) - return [ - f"FROM {root_table_name} AS d JOIN {staging_root_table_name} AS s ON {join_conditions}" - ] + if for_delete: + # clickhouse doesn't support alias in DELETE FROM + return [ + f"FROM {root_table_name} WHERE EXISTS (SELECT 1 FROM" + f" {staging_root_table_name} WHERE" + f" {' OR '.join([c.format(d=root_table_name,s=staging_root_table_name) for c in key_clauses])})" + ] + return SqlMergeJob.gen_key_table_clauses( + root_table_name, staging_root_table_name, key_clauses, for_delete + ) class ClickHouseClient(SqlJobClientWithStaging, SupportsStagingDestination): diff --git a/tests/load/pipeline/test_stage_loading.py b/tests/load/pipeline/test_stage_loading.py index 4d289a5384..57c208eb4b 100644 --- 
a/tests/load/pipeline/test_stage_loading.py +++ b/tests/load/pipeline/test_stage_loading.py @@ -242,7 +242,7 @@ def my_source(): allow_base64_binary = ( destination_config.file_format == "jsonl" and destination_config.destination in ["redshift"] - ) + ) or destination_config.destination in ["clickhouse"] # content must equal assert_all_data_types_row( db_row[:-2], From f9e2920bf0ba2f9c97569c1e1d13fa417b6ea161 Mon Sep 17 00:00:00 2001 From: Dave Date: Thu, 18 Apr 2024 16:55:50 +0200 Subject: [PATCH 097/127] slightly clean up clickhouse load job --- .../impl/clickhouse/clickhouse.py | 125 ++++++++---------- 1 file changed, 55 insertions(+), 70 deletions(-) diff --git a/dlt/destinations/impl/clickhouse/clickhouse.py b/dlt/destinations/impl/clickhouse/clickhouse.py index 86ec6c730d..37f48a06e5 100644 --- a/dlt/destinations/impl/clickhouse/clickhouse.py +++ b/dlt/destinations/impl/clickhouse/clickhouse.py @@ -147,42 +147,67 @@ def __init__( file_name = FileStorage.get_file_name_from_file_path(file_path) super().__init__(file_name) + # prepare files and table qualified_table_name = client.make_qualified_table_name(table_name) + bucket_path = None + if NewReferenceJob.is_reference_job(file_path): + bucket_path = NewReferenceJob.resolve_reference(file_path) + file_name = FileStorage.get_file_name_from_file_path(bucket_path) + ext = cast(SUPPORTED_FILE_FORMATS, os.path.splitext(file_name)[1][1:].lower()) - bucket_path: str = ( - NewReferenceJob.resolve_reference(file_path) - if NewReferenceJob.is_reference_job(file_path) - else "" - ) - file_name = ( - FileStorage.get_file_name_from_file_path(bucket_path) if bucket_path else file_name - ) - file_extension = os.path.splitext(file_name)[1][ - 1: - ].lower() # Remove dot (.) from file extension. - - if file_extension not in ["parquet", "jsonl"]: + if ext not in ["parquet", "jsonl"]: raise LoadJobTerminalException( file_path, "ClickHouse loader Only supports parquet and jsonl files." ) + clickhouse_format: str = FILE_FORMAT_TO_TABLE_FUNCTION_MAPPING[ext] + # local file + if not bucket_path: + # set correct compression for local file + compression = "gz" if FileStorage.is_gzipped(file_path) else "none" + try: + with clickhouse_connect.create_client( + host=client.credentials.host, + port=client.credentials.http_port, + database=client.credentials.database, + user_name=client.credentials.username, + password=client.credentials.password, + secure=bool(client.credentials.secure), + ) as clickhouse_connect_client: + insert_file( + clickhouse_connect_client, + qualified_table_name, + file_path, + fmt=clickhouse_format, + settings={ + "allow_experimental_lightweight_delete": 1, + "allow_experimental_object_type": 1, + "enable_http_compression": 1, + }, + compression=compression, + ) + except clickhouse_connect.driver.exceptions.Error as e: + raise LoadJobTerminalException( + file_path, + f"ClickHouse connection failed due to {e}.", + ) from e + + # all done here + return + + # prepare some vars bucket_url = urlparse(bucket_path) bucket_scheme = bucket_url.scheme - file_extension = cast(SUPPORTED_FILE_FORMATS, file_extension) - clickhouse_format: str = FILE_FORMAT_TO_TABLE_FUNCTION_MAPPING[file_extension] - if file_extension == "parquet": - # Auto works for parquet - compression = "auto" - else: - # It does not work for json + # set compression, for json files compression detection does not work.. 
+ compression = "auto" + if ext == "json": compression = "none" if config.get("data_writer.disable_compression") else "gz" - statement: str = "" - if bucket_scheme in ("s3", "gs", "gcs"): - bucket_http_url = convert_storage_to_http_scheme(bucket_url) - + # credentials + access_key_id = None + secret_access_key = None if isinstance(staging_credentials, AwsCredentialsWithoutDefaults): access_key_id = staging_credentials.aws_access_key_id secret_access_key = staging_credentials.aws_secret_access_key @@ -191,21 +216,14 @@ def __init__( secret_access_key = dlt.config[ "destination.filesystem.credentials.gcp_secret_access_key" ] - else: - access_key_id = None - secret_access_key = None - - structure = "auto" - - table_function = f"SELECT * FROM s3('{bucket_http_url}'" + # build stmt + table_function = f"SELECT * FROM s3('{convert_storage_to_http_scheme(bucket_url)}'" if access_key_id and secret_access_key: table_function += f",'{access_key_id}','{secret_access_key}'" else: table_function += ",NOSIGN" - - table_function += f",'{clickhouse_format}','{structure}','{compression}')" - + table_function += f",'{clickhouse_format}','auto','{compression}')" statement = f"INSERT INTO {qualified_table_name} {table_function}" elif bucket_scheme in ("az", "abfs"): @@ -229,48 +247,15 @@ def __init__( f" azureBlobStorage('{storage_account_url}','{container_name}','{blobpath}','{account_name}','{account_key}','{clickhouse_format}','{compression}')" ) statement = f"INSERT INTO {qualified_table_name} {table_function}" - elif not bucket_path: - # Local filesystem. - if file_extension == "parquet": - compression = "auto" - else: - compression = "gz" if FileStorage.is_gzipped(file_path) else "none" - try: - with clickhouse_connect.create_client( - host=client.credentials.host, - port=client.credentials.http_port, - database=client.credentials.database, - user_name=client.credentials.username, - password=client.credentials.password, - secure=bool(client.credentials.secure), - ) as clickhouse_connect_client: - insert_file( - clickhouse_connect_client, - qualified_table_name, - file_path, - fmt=clickhouse_format, - settings={ - "allow_experimental_lightweight_delete": 1, - "allow_experimental_object_type": 1, - "enable_http_compression": 1, - }, - compression=compression, - ) - except clickhouse_connect.driver.exceptions.Error as e: - raise LoadJobTerminalException( - file_path, - f"ClickHouse connection failed due to {e}.", - ) from e else: raise LoadJobTerminalException( file_path, f"ClickHouse loader does not support '{bucket_scheme}' filesystem.", ) - # Don't use dbapi driver for local files. 
- if bucket_path: - with client.begin_transaction(): - client.execute_sql(statement) + # run tx + with client.begin_transaction(): + client.execute_sql(statement) def state(self) -> TLoadJobState: return "completed" From ebe0289ee583ed301f907378729b337f5a74c931 Mon Sep 17 00:00:00 2001 From: Dave Date: Thu, 18 Apr 2024 23:33:03 +0200 Subject: [PATCH 098/127] fix merge job a bit more --- dlt/destinations/impl/clickhouse/clickhouse.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/dlt/destinations/impl/clickhouse/clickhouse.py b/dlt/destinations/impl/clickhouse/clickhouse.py index 86ec6c730d..452fb965dc 100644 --- a/dlt/destinations/impl/clickhouse/clickhouse.py +++ b/dlt/destinations/impl/clickhouse/clickhouse.py @@ -299,10 +299,10 @@ def gen_key_table_clauses( f" {staging_root_table_name} WHERE" f" {' OR '.join([c.format(d=root_table_name,s=staging_root_table_name) for c in key_clauses])})" ] - return SqlMergeJob.gen_key_table_clauses( - root_table_name, staging_root_table_name, key_clauses, for_delete - ) - + join_conditions = " AND ".join([c.format(d="d", s="s") for c in key_clauses]) + return [ + f"FROM {root_table_name} AS d JOIN {staging_root_table_name} AS s ON {join_conditions}" + ] class ClickHouseClient(SqlJobClientWithStaging, SupportsStagingDestination): capabilities: ClassVar[DestinationCapabilitiesContext] = capabilities() From 6fd243ed219383551506d196496a6e4c4b35cf4b Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Thu, 18 Apr 2024 23:42:30 +0200 Subject: [PATCH 099/127] Refactor key table clause generation in ClickHouse Signed-off-by: Marcel Coetzee --- dlt/destinations/impl/clickhouse/clickhouse.py | 14 ++++---------- 1 file changed, 4 insertions(+), 10 deletions(-) diff --git a/dlt/destinations/impl/clickhouse/clickhouse.py b/dlt/destinations/impl/clickhouse/clickhouse.py index 37f48a06e5..3d4b31e3c8 100644 --- a/dlt/destinations/impl/clickhouse/clickhouse.py +++ b/dlt/destinations/impl/clickhouse/clickhouse.py @@ -277,16 +277,10 @@ def gen_key_table_clauses( key_clauses: Sequence[str], for_delete: bool, ) -> List[str]: - if for_delete: - # clickhouse doesn't support alias in DELETE FROM - return [ - f"FROM {root_table_name} WHERE EXISTS (SELECT 1 FROM" - f" {staging_root_table_name} WHERE" - f" {' OR '.join([c.format(d=root_table_name,s=staging_root_table_name) for c in key_clauses])})" - ] - return SqlMergeJob.gen_key_table_clauses( - root_table_name, staging_root_table_name, key_clauses, for_delete - ) + return [ + f"FROM {root_table_name} AS d INNER JOIN {staging_root_table_name} AS s ON" + f" {' AND '.join([c.format(d='d',s='s') for c in key_clauses])}" + ] class ClickHouseClient(SqlJobClientWithStaging, SupportsStagingDestination): From 766ecd2c538a3d52655a8858fc882ddb44c9959b Mon Sep 17 00:00:00 2001 From: Dave Date: Fri, 19 Apr 2024 17:25:45 +0200 Subject: [PATCH 100/127] fixes a bunch of tests --- dlt/common/time.py | 2 -- dlt/destinations/impl/clickhouse/__init__.py | 1 + .../impl/clickhouse/clickhouse.py | 4 +--- .../dlt-ecosystem/destinations/clickhouse.md | 5 ++++ tests/cases.py | 10 ++++++-- .../test_clickhouse_table_builder.py | 24 +++++++++---------- tests/load/pipeline/test_arrow_loading.py | 10 +++++--- tests/load/pipeline/test_pipelines.py | 13 +++++++--- tests/load/pipeline/test_stage_loading.py | 18 ++++++++++---- tests/load/test_job_client.py | 7 +++++- 10 files changed, 64 insertions(+), 30 deletions(-) diff --git a/dlt/common/time.py b/dlt/common/time.py index 47a92581f4..733b51b980 100644 --- a/dlt/common/time.py +++ 
b/dlt/common/time.py @@ -141,8 +141,6 @@ def ensure_pendulum_time(value: Union[str, datetime.time]) -> pendulum.Time: result = parse_iso_like_datetime(value) if isinstance(result, pendulum.Time): return result - elif isinstance(result, pendulum.DateTime): - return result.time() else: raise ValueError(f"{value} is not a valid ISO time string.") raise TypeError(f"Cannot coerce {value} to a pendulum.Time object.") diff --git a/dlt/destinations/impl/clickhouse/__init__.py b/dlt/destinations/impl/clickhouse/__init__.py index 3f30229eb3..427b4e84b5 100644 --- a/dlt/destinations/impl/clickhouse/__init__.py +++ b/dlt/destinations/impl/clickhouse/__init__.py @@ -29,6 +29,7 @@ def capabilities() -> DestinationCapabilitiesContext: caps.decimal_precision = (DEFAULT_NUMERIC_PRECISION, DEFAULT_NUMERIC_SCALE) # Use 'Decimal256' with these defaults. caps.wei_precision = (76, 0) + caps.timestamp_precision = 6 # https://clickhouse.com/docs/en/operations/settings/settings#max_query_size caps.is_max_query_length_in_bytes = True diff --git a/dlt/destinations/impl/clickhouse/clickhouse.py b/dlt/destinations/impl/clickhouse/clickhouse.py index 452fb965dc..ce9ac524ec 100644 --- a/dlt/destinations/impl/clickhouse/clickhouse.py +++ b/dlt/destinations/impl/clickhouse/clickhouse.py @@ -304,6 +304,7 @@ def gen_key_table_clauses( f"FROM {root_table_name} AS d JOIN {staging_root_table_name} AS s ON {join_conditions}" ] + class ClickHouseClient(SqlJobClientWithStaging, SupportsStagingDestination): capabilities: ClassVar[DestinationCapabilitiesContext] = capabilities() @@ -411,9 +412,6 @@ def get_storage_table(self, table_name: str) -> Tuple[bool, TTableSchemaColumns] schema_table[c[0]] = schema_c # type: ignore return True, schema_table - def get_stored_schema(self) -> StorageSchemaInfo: - return super().get_stored_schema() - @staticmethod def _gen_not_null(v: bool) -> str: # ClickHouse fields are not nullable by default. diff --git a/docs/website/docs/dlt-ecosystem/destinations/clickhouse.md b/docs/website/docs/dlt-ecosystem/destinations/clickhouse.md index ed4b8c7516..2a3d6f5123 100644 --- a/docs/website/docs/dlt-ecosystem/destinations/clickhouse.md +++ b/docs/website/docs/dlt-ecosystem/destinations/clickhouse.md @@ -14,6 +14,11 @@ keywords: [ clickhouse, destination, data warehouse ] pip install dlt[clickhouse] ``` +## Dev Todos for docs +* Clickhouse uses string for time +* bytes are converted to base64 strings when using jsonl and regular strings when using parquet +* JSON / complex fields are experimental currently, they are not supported when loading from parquet and nested structures will be changed when loading from jsonl + ## Setup Guide ### 1. 
Initialize the dlt project diff --git a/tests/cases.py b/tests/cases.py index 83814845a7..15e3fef091 100644 --- a/tests/cases.py +++ b/tests/cases.py @@ -197,6 +197,7 @@ def assert_all_data_types_row( timestamp_precision: int = 6, schema: TTableSchemaColumns = None, expect_filtered_null_columns=False, + allow_string_binary: bool = False, ) -> None: # content must equal # print(db_row) @@ -245,9 +246,14 @@ def assert_all_data_types_row( db_mapping[binary_col] ) # redshift returns binary as hex string except ValueError: - if not allow_base64_binary: + if allow_string_binary: + db_mapping[binary_col] = db_mapping[binary_col].encode("utf-8") + elif allow_base64_binary: + db_mapping[binary_col] = base64.b64decode( + db_mapping[binary_col], validate=True + ) + else: raise - db_mapping[binary_col] = base64.b64decode(db_mapping[binary_col], validate=True) else: db_mapping[binary_col] = bytes(db_mapping[binary_col]) diff --git a/tests/load/clickhouse/test_clickhouse_table_builder.py b/tests/load/clickhouse/test_clickhouse_table_builder.py index 9db87dc233..f38b29a5cb 100644 --- a/tests/load/clickhouse/test_clickhouse_table_builder.py +++ b/tests/load/clickhouse/test_clickhouse_table_builder.py @@ -61,31 +61,31 @@ def test_clickhouse_create_table(clickhouse_client: ClickHouseClient) -> None: assert "`col1` Int64" in sql assert "`col2` Float64" in sql assert "`col3` Boolean" in sql - assert "`col4` DateTime('UTC')" in sql + assert "`col4` DateTime64(6,'UTC')" in sql assert "`col5` String" in sql assert "`col6` Decimal(38,9)" in sql assert "`col7` String" in sql assert "`col8` Decimal(76,0)" in sql assert "`col9` JSON" in sql assert "`col10` Date" in sql - assert "`col11` DateTime" in sql + assert "`col11` String" in sql assert "`col1_null` Nullable(Int64)" in sql assert "`col2_null` Nullable(Float64)" in sql assert "`col3_null` Nullable(Boolean)" in sql - assert "`col4_null` Nullable(DateTime('UTC'))" in sql + assert "`col4_null` Nullable(DateTime64(6,'UTC'))" in sql assert "`col5_null` Nullable(String)" in sql assert "`col6_null` Nullable(Decimal(38,9))" in sql assert "`col7_null` Nullable(String)" in sql assert "`col8_null` Nullable(Decimal(76,0))" in sql assert "`col9_null` JSON" in sql # JSON isn't nullable in clickhouse assert "`col10_null` Nullable(Date)" in sql - assert "`col11_null` Nullable(DateTime)" in sql + assert "`col11_null` Nullable(String)" in sql assert "`col1_precision` Int64" in sql - assert "`col4_precision` DateTime(3,'UTC')" in sql + assert "`col4_precision` DateTime64(3,'UTC')" in sql assert "`col5_precision` String" in sql assert "`col6_precision` Decimal(6,2)" in sql assert "`col7_precision` String" in sql - assert "`col11_precision` DateTime" in sql + assert "`col11_precision` String" in sql def test_clickhouse_alter_table(clickhouse_client: ClickHouseClient) -> None: @@ -103,31 +103,31 @@ def test_clickhouse_alter_table(clickhouse_client: ClickHouseClient) -> None: assert "`col1` Int64" in sql assert "`col2` Float64" in sql assert "`col3` Boolean" in sql - assert "`col4` DateTime('UTC')" in sql + assert "`col4` DateTime64(6,'UTC')" in sql assert "`col5` String" in sql assert "`col6` Decimal(38,9)" in sql assert "`col7` String" in sql assert "`col8` Decimal(76,0)" in sql assert "`col9` JSON" in sql assert "`col10` Date" in sql - assert "`col11` DateTime" in sql + assert "`col11` String" in sql assert "`col1_null` Nullable(Int64)" in sql assert "`col2_null` Nullable(Float64)" in sql assert "`col3_null` Nullable(Boolean)" in sql - assert "`col4_null` 
Nullable(DateTime('UTC'))" in sql + assert "`col4_null` Nullable(DateTime64(6,'UTC'))" in sql assert "`col5_null` Nullable(String)" in sql assert "`col6_null` Nullable(Decimal(38,9))" in sql assert "`col7_null` Nullable(String)" in sql assert "`col8_null` Nullable(Decimal(76,0))" in sql assert "`col9_null` JSON" in sql assert "`col10_null` Nullable(Date)" in sql - assert "`col11_null` Nullable(DateTime)" in sql + assert "`col11_null` Nullable(String)" in sql assert "`col1_precision` Int64" in sql - assert "`col4_precision` DateTime(3,'UTC')" in sql + assert "`col4_precision` DateTime64(3,'UTC')" in sql assert "`col5_precision` String" in sql assert "`col6_precision` Decimal(6,2)" in sql assert "`col7_precision` String" in sql - assert "`col11_precision` DateTime" in sql + assert "`col11_precision` String" in sql mod_table = deepcopy(TABLE_UPDATE) mod_table.pop(0) diff --git a/tests/load/pipeline/test_arrow_loading.py b/tests/load/pipeline/test_arrow_loading.py index e95de95720..752b62a6cc 100644 --- a/tests/load/pipeline/test_arrow_loading.py +++ b/tests/load/pipeline/test_arrow_loading.py @@ -9,7 +9,7 @@ import dlt from dlt.common import pendulum -from dlt.common.time import reduce_pendulum_datetime_precision +from dlt.common.time import reduce_pendulum_datetime_precision, ensure_pendulum_datetime from dlt.common.utils import uniq_id from tests.load.utils import destinations_configs, DestinationTestConfiguration from tests.load.pipeline.utils import select_data @@ -53,6 +53,7 @@ def test_load_arrow_item( destination_config.destination in ("redshift", "databricks") and destination_config.file_format == "jsonl" ) + include_decimal = not ( destination_config.destination == "databricks" and destination_config.file_format == "jsonl" ) @@ -114,10 +115,12 @@ def some_data(): record["binary"] = record["binary"].hex() if destination_config.destination == "clickhouse": - # Clickhouse needs base64 string for record in records: - if "binary" in record: + # Clickhouse needs base64 string for jsonl + if "binary" in record and destination_config.file_format == "jsonl": record["binary"] = base64.b64encode(record["binary"]).decode("ascii") + if "binary" in record and destination_config.file_format == "parquet": + record["binary"] = record["binary"].decode("ascii") for row in rows: for i in range(len(row)): @@ -141,6 +144,7 @@ def some_data(): for row, expected_row in zip(rows, expected): # Compare without _dlt_id/_dlt_load_id columns + assert row[3] == expected_row[3] assert row[:-2] == expected_row # Load id and dlt_id are set assert row[-2] == load_id diff --git a/tests/load/pipeline/test_pipelines.py b/tests/load/pipeline/test_pipelines.py index d362bab018..9e41e96f4f 100644 --- a/tests/load/pipeline/test_pipelines.py +++ b/tests/load/pipeline/test_pipelines.py @@ -794,8 +794,8 @@ def other_data(): data_types = deepcopy(TABLE_ROW_ALL_DATA_TYPES) column_schemas = deepcopy(TABLE_UPDATE_COLUMNS_SCHEMA) - # parquet on bigquery does not support JSON but we still want to run the test - if destination_config.destination == "bigquery": + # parquet on bigquery and clickhouse does not support JSON but we still want to run the test + if destination_config.destination in ["bigquery", "clickhouse"]: column_schemas["col9_null"]["data_type"] = column_schemas["col9"]["data_type"] = "text" # duckdb 0.9.1 does not support TIME other than 6 @@ -808,7 +808,13 @@ def other_data(): column_schemas["col4_precision"]["precision"] = 6 # drop TIME from databases not supporting it via parquet - if destination_config.destination 
in ["redshift", "athena", "synapse", "databricks"]: + if destination_config.destination in [ + "redshift", + "athena", + "synapse", + "databricks", + "clickhouse", + ]: data_types.pop("col11") data_types.pop("col11_null") data_types.pop("col11_precision") @@ -865,6 +871,7 @@ def some_source(): schema=column_schemas, parse_complex_strings=destination_config.destination in ["snowflake", "bigquery", "redshift"], + allow_string_binary=destination_config.destination == "clickhouse", timestamp_precision=3 if destination_config.destination in ("athena", "dremio") else 6, ) diff --git a/tests/load/pipeline/test_stage_loading.py b/tests/load/pipeline/test_stage_loading.py index 57c208eb4b..60a7be259b 100644 --- a/tests/load/pipeline/test_stage_loading.py +++ b/tests/load/pipeline/test_stage_loading.py @@ -180,6 +180,7 @@ def test_all_data_types(destination_config: DestinationTestConfiguration) -> Non "redshift", "athena", "databricks", + "clickhouse", ) and destination_config.file_format in ("parquet", "jsonl"): # Redshift copy doesn't support TIME column exclude_types.append("time") @@ -203,9 +204,13 @@ def test_all_data_types(destination_config: DestinationTestConfiguration) -> Non exclude_types=exclude_types, exclude_columns=exclude_columns ) - # bigquery cannot load into JSON fields from parquet + # clickhouse json is experimental, will not work for parquet and makes strange changes for jsonl + if destination_config.destination in ["clickhouse"]: + column_schemas["col9_null"]["data_type"] = column_schemas["col9"]["data_type"] = "text" + + # bigquery and clickhouse cannot load into JSON fields from parquet if destination_config.file_format == "parquet": - if destination_config.destination == "bigquery": + if destination_config.destination in ["bigquery"]: # change datatype to text and then allow for it in the assert (parse_complex_strings) column_schemas["col9_null"]["data_type"] = column_schemas["col9"]["data_type"] = "text" # redshift cannot load from json into VARBYTE @@ -241,13 +246,18 @@ def my_source(): ) allow_base64_binary = ( destination_config.file_format == "jsonl" - and destination_config.destination in ["redshift"] - ) or destination_config.destination in ["clickhouse"] + and destination_config.destination in ["redshift", "clickhouse"] + ) + allow_string_binary = ( + destination_config.file_format == "parquet" + and destination_config.destination in ["clickhouse"] + ) # content must equal assert_all_data_types_row( db_row[:-2], parse_complex_strings=parse_complex_strings, allow_base64_binary=allow_base64_binary, + allow_string_binary=allow_string_binary, timestamp_precision=sql_client.capabilities.timestamp_precision, schema=column_schemas, ) diff --git a/tests/load/test_job_client.py b/tests/load/test_job_client.py index 4bf15bbde7..ffc05a5e3c 100644 --- a/tests/load/test_job_client.py +++ b/tests/load/test_job_client.py @@ -129,7 +129,7 @@ def test_get_update_basic_schema(client: SqlJobClientBase) -> None: first_schema._bump_version() assert first_schema.version == this_schema.version == 2 # wait to make load_newest_schema deterministic - sleep(0.1) + sleep(2) client._update_schema_in_storage(first_schema) this_schema = client.get_stored_schema_by_hash(first_schema.version_hash) newest_schema = client.get_stored_schema() @@ -519,6 +519,11 @@ def test_load_with_all_types( ["time"] if client.config.destination_type in ["databricks", "clickhouse"] else None ), ) + + # switch complex to string for clickhouse + if client.config.destination_type in ["clickhouse"]: + 
column_schemas["col9_null"]["data_type"] = column_schemas["col9"]["data_type"] = "text" + # we should have identical content with all disposition types client.schema.update_table( new_table( From dc3cb76e730a2d55dedc7eef8ccf6d47e20d8035 Mon Sep 17 00:00:00 2001 From: Dave Date: Fri, 19 Apr 2024 17:52:04 +0200 Subject: [PATCH 101/127] simplify clickhouse load job a bit --- .../impl/clickhouse/clickhouse.py | 140 +++++++----------- tests/load/test_sql_client.py | 4 +- 2 files changed, 60 insertions(+), 84 deletions(-) diff --git a/dlt/destinations/impl/clickhouse/clickhouse.py b/dlt/destinations/impl/clickhouse/clickhouse.py index ce9ac524ec..83e76870f0 100644 --- a/dlt/destinations/impl/clickhouse/clickhouse.py +++ b/dlt/destinations/impl/clickhouse/clickhouse.py @@ -148,41 +148,61 @@ def __init__( super().__init__(file_name) qualified_table_name = client.make_qualified_table_name(table_name) + bucket_path = None - bucket_path: str = ( - NewReferenceJob.resolve_reference(file_path) - if NewReferenceJob.is_reference_job(file_path) - else "" - ) - file_name = ( - FileStorage.get_file_name_from_file_path(bucket_path) if bucket_path else file_name - ) - file_extension = os.path.splitext(file_name)[1][ - 1: - ].lower() # Remove dot (.) from file extension. + if NewReferenceJob.is_reference_job(file_path): + bucket_path = NewReferenceJob.resolve_reference(file_path) + file_name = FileStorage.get_file_name_from_file_path(bucket_path) + bucket_url = urlparse(bucket_path) + bucket_scheme = bucket_url.scheme - if file_extension not in ["parquet", "jsonl"]: - raise LoadJobTerminalException( - file_path, "ClickHouse loader Only supports parquet and jsonl files." - ) + ext = cast(SUPPORTED_FILE_FORMATS, os.path.splitext(file_name)[1][1:].lower()) + clickhouse_format: str = FILE_FORMAT_TO_TABLE_FUNCTION_MAPPING[ext] - bucket_url = urlparse(bucket_path) - bucket_scheme = bucket_url.scheme + compression = "auto" - file_extension = cast(SUPPORTED_FILE_FORMATS, file_extension) - clickhouse_format: str = FILE_FORMAT_TO_TABLE_FUNCTION_MAPPING[file_extension] - if file_extension == "parquet": - # Auto works for parquet - compression = "auto" - else: - # It does not work for json - compression = "none" if config.get("data_writer.disable_compression") else "gz" + # Don't use dbapi driver for local files. + if not bucket_path: + # Local filesystem. 
+ if ext == "jsonl": + compression = "gz" if FileStorage.is_gzipped(file_path) else "none" + try: + with clickhouse_connect.create_client( + host=client.credentials.host, + port=client.credentials.http_port, + database=client.credentials.database, + user_name=client.credentials.username, + password=client.credentials.password, + secure=bool(client.credentials.secure), + ) as clickhouse_connect_client: + insert_file( + clickhouse_connect_client, + qualified_table_name, + file_path, + fmt=clickhouse_format, + settings={ + "allow_experimental_lightweight_delete": 1, + "allow_experimental_object_type": 1, + "enable_http_compression": 1, + }, + compression=compression, + ) + except clickhouse_connect.driver.exceptions.Error as e: + raise LoadJobTerminalException( + file_path, + f"ClickHouse connection failed due to {e}.", + ) from e + return - statement: str = "" + # Auto does not work for jsonl, get info from config for buckets + if ext == "jsonl": + compression = "none" if config.get("data_writer.disable_compression") else "gz" if bucket_scheme in ("s3", "gs", "gcs"): + # get auth and bucket url bucket_http_url = convert_storage_to_http_scheme(bucket_url) - + access_key_id = None + secret_access_key = None if isinstance(staging_credentials, AwsCredentialsWithoutDefaults): access_key_id = staging_credentials.aws_access_key_id secret_access_key = staging_credentials.aws_secret_access_key @@ -191,22 +211,14 @@ def __init__( secret_access_key = dlt.config[ "destination.filesystem.credentials.gcp_secret_access_key" ] - else: - access_key_id = None - secret_access_key = None - - structure = "auto" - - table_function = f"SELECT * FROM s3('{bucket_http_url}'" + auth = "NOSIGN" if access_key_id and secret_access_key: - table_function += f",'{access_key_id}','{secret_access_key}'" - else: - table_function += ",NOSIGN" + auth = f"'{access_key_id}','{secret_access_key}'" - table_function += f",'{clickhouse_format}','{structure}','{compression}')" - - statement = f"INSERT INTO {qualified_table_name} {table_function}" + table_function = ( + f"s3('{bucket_http_url}',{auth},'{clickhouse_format}','auto','{compression}')" + ) elif bucket_scheme in ("az", "abfs"): if not isinstance(staging_credentials, AzureCredentialsWithoutDefaults): @@ -221,56 +233,18 @@ def __init__( f"https://{staging_credentials.azure_storage_account_name}.blob.core.windows.net" ) account_key = staging_credentials.azure_storage_account_key - container_name = bucket_url.netloc - blobpath = bucket_url.path - table_function = ( - "SELECT * FROM" - f" azureBlobStorage('{storage_account_url}','{container_name}','{blobpath}','{account_name}','{account_key}','{clickhouse_format}','{compression}')" - ) - statement = f"INSERT INTO {qualified_table_name} {table_function}" - elif not bucket_path: - # Local filesystem. 
- if file_extension == "parquet": - compression = "auto" - else: - compression = "gz" if FileStorage.is_gzipped(file_path) else "none" - try: - with clickhouse_connect.create_client( - host=client.credentials.host, - port=client.credentials.http_port, - database=client.credentials.database, - user_name=client.credentials.username, - password=client.credentials.password, - secure=bool(client.credentials.secure), - ) as clickhouse_connect_client: - insert_file( - clickhouse_connect_client, - qualified_table_name, - file_path, - fmt=clickhouse_format, - settings={ - "allow_experimental_lightweight_delete": 1, - "allow_experimental_object_type": 1, - "enable_http_compression": 1, - }, - compression=compression, - ) - except clickhouse_connect.driver.exceptions.Error as e: - raise LoadJobTerminalException( - file_path, - f"ClickHouse connection failed due to {e}.", - ) from e + # build table func + table_function = f"azureBlobStorage('{storage_account_url}','{bucket_url.netloc}','{bucket_url.path}','{account_name}','{account_key}','{clickhouse_format}','{compression}')" else: raise LoadJobTerminalException( file_path, f"ClickHouse loader does not support '{bucket_scheme}' filesystem.", ) - # Don't use dbapi driver for local files. - if bucket_path: - with client.begin_transaction(): - client.execute_sql(statement) + statement = f"INSERT INTO {qualified_table_name} SELECT * FROM {table_function}" + with client.begin_transaction(): + client.execute_sql(statement) def state(self) -> TLoadJobState: return "completed" diff --git a/tests/load/test_sql_client.py b/tests/load/test_sql_client.py index e9ddddcbe0..cba26c36dd 100644 --- a/tests/load/test_sql_client.py +++ b/tests/load/test_sql_client.py @@ -38,7 +38,9 @@ def client(request) -> Iterator[SqlJobClientBase]: @pytest.mark.parametrize( "client", - destinations_configs(default_sql_configs=True, exclude=["mssql", "synapse", "dremio"]), + destinations_configs( + default_sql_configs=True, exclude=["mssql", "synapse", "dremio", "clickhouse"] + ), indirect=True, ids=lambda x: x.name, ) From 9ae69794934b877ae565f6b99380ed67e8779b65 Mon Sep 17 00:00:00 2001 From: Dave Date: Fri, 19 Apr 2024 18:45:02 +0200 Subject: [PATCH 102/127] add sentinel table for dataset existence check --- .../impl/clickhouse/configuration.py | 5 +-- .../impl/clickhouse/sql_client.py | 39 ++++++++++--------- 2 files changed, 23 insertions(+), 21 deletions(-) diff --git a/dlt/destinations/impl/clickhouse/configuration.py b/dlt/destinations/impl/clickhouse/configuration.py index 71d9d1de38..872ffbe66e 100644 --- a/dlt/destinations/impl/clickhouse/configuration.py +++ b/dlt/destinations/impl/clickhouse/configuration.py @@ -36,6 +36,8 @@ class ClickHouseCredentials(ConnectionStringCredentials): """Timeout for sending and receiving data. Defaults to 300 seconds.""" dataset_table_separator: str = "___" """Separator for dataset table names, defaults to '___', i.e. 
'database.dataset___table'.""" + dataset_sentinel_table_name: str = "dlt_sentinel_table" + """Special table to mark dataset as existing""" __config_gen_annotations__: ClassVar[List[str]] = [ "host", @@ -45,9 +47,6 @@ class ClickHouseCredentials(ConnectionStringCredentials): "database", "username", "password", - "connect_timeout", - "send_receive_timeout", - "dataset_table_separator", ] def parse_native_representation(self, native_value: Any) -> None: diff --git a/dlt/destinations/impl/clickhouse/sql_client.py b/dlt/destinations/impl/clickhouse/sql_client.py index fdddf7de3c..8d87df9c07 100644 --- a/dlt/destinations/impl/clickhouse/sql_client.py +++ b/dlt/destinations/impl/clickhouse/sql_client.py @@ -54,7 +54,10 @@ def __init__(self, dataset_name: str, credentials: ClickHouseCredentials) -> Non self.database_name = credentials.database def has_dataset(self) -> bool: - return len(self._list_tables()) > 0 + sentinel_table = self.credentials.dataset_sentinel_table_name + return sentinel_table in [ + t.split(self.credentials.dataset_table_separator)[1] for t in self._list_tables() + ] def open_connection(self) -> clickhouse_driver.dbapi.connection.Connection: self._conn = clickhouse_driver.connect(dsn=self.credentials.to_native_representation()) @@ -90,8 +93,13 @@ def execute_sql( return None if curr.description is None else curr.fetchall() def create_dataset(self) -> None: - # ClickHouse doesn't have schemas. - pass + # We create a sentinel table which defines wether we consider the dataset created + sentinel_table_name = self.make_qualified_table_name( + self.credentials.dataset_sentinel_table_name + ) + self.execute_sql( + f"""CREATE TABLE {sentinel_table_name} (_dlt_id String NOT NULL PRIMARY KEY) ENGINE=MergeTree COMMENT 'internal dlt sentinel table'""" + ) def drop_dataset(self) -> None: # Since ClickHouse doesn't have schemas, we need to drop all tables in our virtual schema, @@ -115,7 +123,7 @@ def _list_tables(self) -> List[str]: """, ( self.database_name, - f"{self.dataset_name}%", + f"{self.dataset_name}{self.credentials.dataset_table_separator}%", ), ) return [row[0] for row in rows] @@ -144,25 +152,20 @@ def execute_query( yield ClickHouseDBApiCursorImpl(cursor) # type: ignore[abstract] def fully_qualified_dataset_name(self, escape: bool = True) -> str: + database_name = self.database_name + dataset_name = self.dataset_name if escape: - database_name = self.capabilities.escape_identifier(self.database_name) - dataset_name = self.capabilities.escape_identifier(f"{self.dataset_name}") - else: - database_name = self.database_name - dataset_name = f"{self.dataset_name}" + database_name = self.capabilities.escape_identifier(database_name) + dataset_name = self.capabilities.escape_identifier(dataset_name) return f"{database_name}.{dataset_name}" def make_qualified_table_name(self, table_name: str, escape: bool = True) -> str: - dataset_table_separator = self.credentials.dataset_table_separator + database_name = self.database_name + table_name = f"{self.dataset_name}{self.credentials.dataset_table_separator}{table_name}" if escape: - database_name = self.capabilities.escape_identifier(self.database_name) - dataset_and_table = self.capabilities.escape_identifier( - f"{self.dataset_name}{dataset_table_separator}{table_name}" - ) - else: - database_name = self.database_name - dataset_and_table = f"{self.dataset_name}{dataset_table_separator}{table_name}" - return f"{database_name}.{dataset_and_table}" + database_name = self.capabilities.escape_identifier(database_name) + table_name = 
self.capabilities.escape_identifier(table_name) + return f"{database_name}.{table_name}" @classmethod def _make_database_exception(cls, ex: Exception) -> Exception: From 56c2b9fe30a950e79d010edad7dad08100fdeef6 Mon Sep 17 00:00:00 2001 From: Dave Date: Fri, 19 Apr 2024 18:47:41 +0200 Subject: [PATCH 103/127] post merge lockfile update --- poetry.lock | 253 +++++++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 252 insertions(+), 1 deletion(-) diff --git a/poetry.lock b/poetry.lock index 48876a8303..d6e78f4144 100644 --- a/poetry.lock +++ b/poetry.lock @@ -3569,6 +3569,214 @@ files = [ {file = "google_re2-1.1-1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9c6c9f64b9724ec38da8e514f404ac64e9a6a5e8b1d7031c2dadd05c1f4c16fd"}, {file = "google_re2-1.1-1-cp39-cp39-win32.whl", hash = "sha256:d1b751b9ab9f8e2ab2a36d72b909281ce65f328c9115a1685acae1a2d1afd7a4"}, {file = "google_re2-1.1-1-cp39-cp39-win_amd64.whl", hash = "sha256:ac775c75cec7069351d201da4e0fb0cae4c1c5ebecd08fa34e1be89740c1d80b"}, + {file = "google_re2-1.1-2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5eaefe4705b75ca5f78178a50104b689e9282f868e12f119b26b4cffc0c7ee6e"}, + {file = "google_re2-1.1-2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:e35f2c8aabfaaa4ce6420b3cae86c0c29042b1b4f9937254347e9b985694a171"}, + {file = "google_re2-1.1-2-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:35fd189cbaaaa39c9a6a8a00164c8d9c709bacd0c231c694936879609beff516"}, + {file = "google_re2-1.1-2-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:60475d222cebd066c80414831c8a42aa2449aab252084102ee05440896586e6a"}, + {file = "google_re2-1.1-2-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:871cb85b9b0e1784c983b5c148156b3c5314cb29ca70432dff0d163c5c08d7e5"}, + {file = "google_re2-1.1-2-cp310-cp310-macosx_13_0_x86_64.whl", hash = "sha256:94f4e66e34bdb8de91ec6cdf20ba4fa9fea1dfdcfb77ff1f59700d01a0243664"}, + {file = "google_re2-1.1-2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1563577e2b720d267c4cffacc0f6a2b5c8480ea966ebdb1844fbea6602c7496f"}, + {file = "google_re2-1.1-2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:49b7964532a801b96062d78c0222d155873968f823a546a3dbe63d73f25bb56f"}, + {file = "google_re2-1.1-2-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2362fd70eb639a75fd0187d28b4ba7b20b3088833d8ad7ffd8693d0ba159e1c2"}, + {file = "google_re2-1.1-2-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:86b80719636a4e21391e20a9adf18173ee6ae2ec956726fe2ff587417b5e8ba6"}, + {file = "google_re2-1.1-2-cp310-cp310-win32.whl", hash = "sha256:5456fba09df951fe8d1714474ed1ecda102a68ddffab0113e6c117d2e64e6f2b"}, + {file = "google_re2-1.1-2-cp310-cp310-win_amd64.whl", hash = "sha256:2ac6936a3a60d8d9de9563e90227b3aea27068f597274ca192c999a12d8baa8f"}, + {file = "google_re2-1.1-2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d5a87b436028ec9b0f02fe19d4cbc19ef30441085cdfcdf1cce8fbe5c4bd5e9a"}, + {file = "google_re2-1.1-2-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:fc0d4163de9ed2155a77e7a2d59d94c348a6bbab3cff88922fab9e0d3d24faec"}, + {file = "google_re2-1.1-2-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:48b12d953bc796736e7831d67b36892fb6419a4cc44cb16521fe291e594bfe23"}, + {file = "google_re2-1.1-2-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:62c780c927cff98c1538439f0ff616f48a9b2e8837c676f53170d8ae5b9e83cb"}, + {file = "google_re2-1.1-2-cp311-cp311-macosx_13_0_arm64.whl", hash = 
"sha256:04b2aefd768aa4edeef8b273327806c9cb0b82e90ff52eacf5d11003ac7a0db2"}, + {file = "google_re2-1.1-2-cp311-cp311-macosx_13_0_x86_64.whl", hash = "sha256:9c90175992346519ee7546d9af9a64541c05b6b70346b0ddc54a48aa0d3b6554"}, + {file = "google_re2-1.1-2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:22ad9ad9d125249d6386a2e80efb9de7af8260b703b6be7fa0ab069c1cf56ced"}, + {file = "google_re2-1.1-2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f70971f6ffe5254e476e71d449089917f50ebf9cf60f9cec80975ab1693777e2"}, + {file = "google_re2-1.1-2-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f267499529e64a4abed24c588f355ebe4700189d434d84a7367725f5a186e48d"}, + {file = "google_re2-1.1-2-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b632eff5e4cd44545a9c0e52f2e1becd55831e25f4dd4e0d7ec8ee6ca50858c1"}, + {file = "google_re2-1.1-2-cp311-cp311-win32.whl", hash = "sha256:a42c733036e8f242ee4e5f0e27153ad4ca44ced9e4ce82f3972938ddee528db0"}, + {file = "google_re2-1.1-2-cp311-cp311-win_amd64.whl", hash = "sha256:64f8eed4ca96905d99b5286b3d14b5ca4f6a025ff3c1351626a7df2f93ad1ddd"}, + {file = "google_re2-1.1-2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5541efcca5b5faf7e0d882334a04fa479bad4e7433f94870f46272eec0672c4a"}, + {file = "google_re2-1.1-2-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:92309af35b6eb2d3b3dc57045cdd83a76370958ab3e0edd2cc4638f6d23f5b32"}, + {file = "google_re2-1.1-2-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:197cd9bcaba96d18c5bf84d0c32fca7a26c234ea83b1d3083366f4392cb99f78"}, + {file = "google_re2-1.1-2-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:1b896f171d29b541256cf26e10dccc9103ac1894683914ed88828ca6facf8dca"}, + {file = "google_re2-1.1-2-cp38-cp38-macosx_13_0_arm64.whl", hash = "sha256:e022d3239b945014e916ca7120fee659b246ec26c301f9e0542f1a19b38a8744"}, + {file = "google_re2-1.1-2-cp38-cp38-macosx_13_0_x86_64.whl", hash = "sha256:2c73f8a9440873b68bee1198094377501065e85aaf6fcc0d2512c7589ffa06ca"}, + {file = "google_re2-1.1-2-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:901d86555bd7725506d651afaba7d71cd4abd13260aed6cfd7c641a45f76d4f6"}, + {file = "google_re2-1.1-2-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ce4710ff636701cfb56eb91c19b775d53b03749a23b7d2a5071bbbf4342a9067"}, + {file = "google_re2-1.1-2-cp38-cp38-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:76a20e5ebdf5bc5d430530197e42a2eeb562f729d3a3fb51f39168283d676e66"}, + {file = "google_re2-1.1-2-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:77c9f4d4bb1c8de9d2642d3c4b8b615858ba764df025b3b4f1310266f8def269"}, + {file = "google_re2-1.1-2-cp38-cp38-win32.whl", hash = "sha256:94bd60785bf37ef130a1613738e3c39465a67eae3f3be44bb918540d39b68da3"}, + {file = "google_re2-1.1-2-cp38-cp38-win_amd64.whl", hash = "sha256:59efeb77c0dcdbe37794c61f29c5b1f34bc06e8ec309a111ccdd29d380644d70"}, + {file = "google_re2-1.1-2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:221e38c27e1dd9ccb8e911e9c7aed6439f68ce81e7bb74001076830b0d6e931d"}, + {file = "google_re2-1.1-2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:d9145879e6c2e1b814445300b31f88a675e1f06c57564670d95a1442e8370c27"}, + {file = "google_re2-1.1-2-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:c8a12f0740e2a52826bdbf95569a4b0abdf413b4012fa71e94ad25dd4715c6e5"}, + {file = "google_re2-1.1-2-cp39-cp39-macosx_12_0_x86_64.whl", hash = 
"sha256:9c9998f71466f4db7bda752aa7c348b2881ff688e361108fe500caad1d8b9cb2"}, + {file = "google_re2-1.1-2-cp39-cp39-macosx_13_0_arm64.whl", hash = "sha256:0c39f69b702005963a3d3bf78743e1733ad73efd7e6e8465d76e3009e4694ceb"}, + {file = "google_re2-1.1-2-cp39-cp39-macosx_13_0_x86_64.whl", hash = "sha256:6d0ce762dee8d6617d0b1788a9653e805e83a23046c441d0ea65f1e27bf84114"}, + {file = "google_re2-1.1-2-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ecf3619d98c9b4a7844ab52552ad32597cdbc9a5bdbc7e3435391c653600d1e2"}, + {file = "google_re2-1.1-2-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9a1426a8cbd1fa004974574708d496005bd379310c4b1c7012be4bc75efde7a8"}, + {file = "google_re2-1.1-2-cp39-cp39-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a1a30626ba48b4070f3eab272d860ef1952e710b088792c4d68dddb155be6bfc"}, + {file = "google_re2-1.1-2-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1b9c1ffcfbc3095b6ff601ec2d2bf662988f6ea6763bc1c9d52bec55881f8fde"}, + {file = "google_re2-1.1-2-cp39-cp39-win32.whl", hash = "sha256:32ecf995a252c0548404c1065ba4b36f1e524f1f4a86b6367a1a6c3da3801e30"}, + {file = "google_re2-1.1-2-cp39-cp39-win_amd64.whl", hash = "sha256:e7865410f3b112a3609739283ec3f4f6f25aae827ff59c6bfdf806fd394d753e"}, + {file = "google_re2-1.1-3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3b21f83f0a201009c56f06fcc7294a33555ede97130e8a91b3f4cae01aed1d73"}, + {file = "google_re2-1.1-3-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:b38194b91354a38db1f86f25d09cdc6ac85d63aee4c67b43da3048ce637adf45"}, + {file = "google_re2-1.1-3-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:e7da3da8d6b5a18d6c3b61b11cc5b66b8564eaedce99d2312b15b6487730fc76"}, + {file = "google_re2-1.1-3-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:aeca656fb10d8638f245331aabab59c9e7e051ca974b366dd79e6a9efb12e401"}, + {file = "google_re2-1.1-3-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:2069d6dc94f5fa14a159bf99cad2f11e9c0f8ec3b7f44a4dde9e59afe5d1c786"}, + {file = "google_re2-1.1-3-cp310-cp310-macosx_13_0_x86_64.whl", hash = "sha256:2319a39305a4931cb5251451f2582713418a19bef2af7adf9e2a7a0edd939b99"}, + {file = "google_re2-1.1-3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:eb98fc131699756c6d86246f670a5e1c1cc1ba85413c425ad344cb30479b246c"}, + {file = "google_re2-1.1-3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a6e038986d8ffe4e269f8532f03009f229d1f6018d4ac0dabc8aff876338f6e0"}, + {file = "google_re2-1.1-3-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8618343ee658310e0f53bf586fab7409de43ce82bf8d9f7eb119536adc9783fd"}, + {file = "google_re2-1.1-3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d8140ca861cfe00602319cefe2c7b8737b379eb07fb328b51dc44584f47a2718"}, + {file = "google_re2-1.1-3-cp310-cp310-win32.whl", hash = "sha256:41f439c5c54e8a3a0a1fa2dbd1e809d3f643f862df7b16dd790f36a1238a272e"}, + {file = "google_re2-1.1-3-cp310-cp310-win_amd64.whl", hash = "sha256:fe20e97a33176d96d3e4b5b401de35182b9505823abea51425ec011f53ef5e56"}, + {file = "google_re2-1.1-3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7c39ff52b1765db039f690ee5b7b23919d8535aae94db7996079fbde0098c4d7"}, + {file = "google_re2-1.1-3-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:5420be674fd164041639ba4c825450f3d4bd635572acdde16b3dcd697f8aa3ef"}, + {file = "google_re2-1.1-3-cp311-cp311-macosx_12_0_arm64.whl", hash = 
"sha256:ff53881cf1ce040f102a42d39db93c3f835f522337ae9c79839a842f26d97733"}, + {file = "google_re2-1.1-3-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:8d04600b0b53523118df2e413a71417c408f20dee640bf07dfab601c96a18a77"}, + {file = "google_re2-1.1-3-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:c4835d4849faa34a7fa1074098d81c420ed6c0707a3772482b02ce14f2a7c007"}, + {file = "google_re2-1.1-3-cp311-cp311-macosx_13_0_x86_64.whl", hash = "sha256:3309a9b81251d35fee15974d0ae0581a9a375266deeafdc3a3ac0d172a742357"}, + {file = "google_re2-1.1-3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:e2b51cafee7e0bc72d0a4a454547bd8f257cde412ac9f1a2dc46a203b5e42cf4"}, + {file = "google_re2-1.1-3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:83f5f1cb52f832c2297d271ee8c56cf5e9053448162e5d2223d513f729bad908"}, + {file = "google_re2-1.1-3-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:55865a1ace92be3f7953b2e2b38b901d8074a367aa491daee43260a53a7fc6f0"}, + {file = "google_re2-1.1-3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cec2167dd142e583e98c783bd0d28b8cf5a9cdbe1f7407ba4163fe3ccb613cb9"}, + {file = "google_re2-1.1-3-cp311-cp311-win32.whl", hash = "sha256:a0bc1fe96849e4eb8b726d0bba493f5b989372243b32fe20729cace02e5a214d"}, + {file = "google_re2-1.1-3-cp311-cp311-win_amd64.whl", hash = "sha256:e6310a156db96fc5957cb007dd2feb18476898654530683897469447df73a7cd"}, + {file = "google_re2-1.1-3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8e63cd10ea006088b320e8c5d308da1f6c87aa95138a71c60dd7ca1c8e91927e"}, + {file = "google_re2-1.1-3-cp312-cp312-macosx_11_0_x86_64.whl", hash = "sha256:12b566830a334178733a85e416b1e0507dbc0ceb322827616fe51ef56c5154f1"}, + {file = "google_re2-1.1-3-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:442e18c9d46b225c1496919c16eafe8f8d9bb4091b00b4d3440da03c55bbf4ed"}, + {file = "google_re2-1.1-3-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:c54c00263a9c39b2dacd93e9636319af51e3cf885c080b9680a9631708326460"}, + {file = "google_re2-1.1-3-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:15a3caeeb327bc22e0c9f95eb76890fec8874cacccd2b01ff5c080ab4819bbec"}, + {file = "google_re2-1.1-3-cp312-cp312-macosx_13_0_x86_64.whl", hash = "sha256:59ec0d2cced77f715d41f6eafd901f6b15c11e28ba25fe0effdc1de554d78e75"}, + {file = "google_re2-1.1-3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:185bf0e3441aed3840590f8e42f916e2920d235eb14df2cbc2049526803d3e71"}, + {file = "google_re2-1.1-3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:586d3f2014eea5be14d8de53374d9b79fa99689160e00efa64b5fe93af326087"}, + {file = "google_re2-1.1-3-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cc2575082de4ffd234d9607f3ae67ca22b15a1a88793240e2045f3b3a36a5795"}, + {file = "google_re2-1.1-3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:59c5ad438eddb3630def394456091284d7bbc5b89351987f94f3792d296d1f96"}, + {file = "google_re2-1.1-3-cp312-cp312-win32.whl", hash = "sha256:5b9878c53f2bf16f75bf71d4ddd57f6611351408d5821040e91c53ebdf82c373"}, + {file = "google_re2-1.1-3-cp312-cp312-win_amd64.whl", hash = "sha256:4fdecfeb213110d0a85bad335a8e7cdb59fea7de81a4fe659233f487171980f9"}, + {file = "google_re2-1.1-3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2dd87bacab32b709c28d0145fe75a956b6a39e28f0726d867375dba5721c76c1"}, + {file = "google_re2-1.1-3-cp38-cp38-macosx_11_0_x86_64.whl", hash = 
"sha256:55d24c61fe35dddc1bb484593a57c9f60f9e66d7f31f091ef9608ed0b6dde79f"}, + {file = "google_re2-1.1-3-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:a0cf1180d908622df648c26b0cd09281f92129805ccc56a39227fdbfeab95cb4"}, + {file = "google_re2-1.1-3-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:09586f07f3f88d432265c75976da1c619ab7192cd7ebdf53f4ae0776c19e4b56"}, + {file = "google_re2-1.1-3-cp38-cp38-macosx_13_0_arm64.whl", hash = "sha256:539f1b053402203576e919a06749198da4ae415931ee28948a1898131ae932ce"}, + {file = "google_re2-1.1-3-cp38-cp38-macosx_13_0_x86_64.whl", hash = "sha256:abf0bcb5365b0e27a5a23f3da403dffdbbac2c0e3a3f1535a8b10cc121b5d5fb"}, + {file = "google_re2-1.1-3-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:19c83e5bbed7958213eeac3aa71c506525ce54faf03e07d0b96cd0a764890511"}, + {file = "google_re2-1.1-3-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3348e77330ff672dc44ec01894fa5d93c409a532b6d688feac55e714e9059920"}, + {file = "google_re2-1.1-3-cp38-cp38-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:06b63edb57c5ce5a13eabfd71155e346b9477dc8906dec7c580d4f70c16a7e0d"}, + {file = "google_re2-1.1-3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:12fe57ba2914092b83338d61d8def9ebd5a2bd0fd8679eceb5d4c2748105d5c0"}, + {file = "google_re2-1.1-3-cp38-cp38-win32.whl", hash = "sha256:80796e08d24e606e675019fe8de4eb5c94bb765be13c384f2695247d54a6df75"}, + {file = "google_re2-1.1-3-cp38-cp38-win_amd64.whl", hash = "sha256:3c2257dedfe7cc5deb6791e563af9e071a9d414dad89e37ac7ad22f91be171a9"}, + {file = "google_re2-1.1-3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:43a0cd77c87c894f28969ac622f94b2e6d1571261dfdd785026848a25cfdc9b9"}, + {file = "google_re2-1.1-3-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:1038990b77fd66f279bd66a0832b67435ea925e15bb59eafc7b60fdec812b616"}, + {file = "google_re2-1.1-3-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:fb5dda6875d18dd45f0f24ebced6d1f7388867c8fb04a235d1deab7ea479ce38"}, + {file = "google_re2-1.1-3-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:bb1d164965c6d57a351b421d2f77c051403766a8b75aaa602324ee2451fff77f"}, + {file = "google_re2-1.1-3-cp39-cp39-macosx_13_0_arm64.whl", hash = "sha256:a072ebfa495051d07ffecbf6ce21eb84793568d5c3c678c00ed8ff6b8066ab31"}, + {file = "google_re2-1.1-3-cp39-cp39-macosx_13_0_x86_64.whl", hash = "sha256:4eb66c8398c8a510adc97978d944b3b29c91181237218841ea1a91dc39ec0e54"}, + {file = "google_re2-1.1-3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:f7c8b57b1f559553248d1757b7fa5b2e0cc845666738d155dff1987c2618264e"}, + {file = "google_re2-1.1-3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9162f6aa4f25453c682eb176f21b8e2f40205be9f667e98a54b3e1ff10d6ee75"}, + {file = "google_re2-1.1-3-cp39-cp39-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a2d65ddf67fd7bf94705626871d463057d3d9a3538d41022f95b9d8f01df36e1"}, + {file = "google_re2-1.1-3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d140c7b9395b4d1e654127aa1c99bcc603ed01000b7bc7e28c52562f1894ec12"}, + {file = "google_re2-1.1-3-cp39-cp39-win32.whl", hash = "sha256:80c5fc200f64b2d903eeb07b8d6cefc620a872a0240c7caaa9aca05b20f5568f"}, + {file = "google_re2-1.1-3-cp39-cp39-win_amd64.whl", hash = "sha256:9eb6dbcee9b5dc4069bbc0634f2eb039ca524a14bed5868fdf6560aaafcbca06"}, + {file = "google_re2-1.1-4-cp310-cp310-macosx_12_0_arm64.whl", hash = 
"sha256:0db114d7e1aa96dbcea452a40136d7d747d60cbb61394965774688ef59cccd4e"}, + {file = "google_re2-1.1-4-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:82133958e003a1344e5b7a791b9a9dd7560b5c8f96936dbe16f294604524a633"}, + {file = "google_re2-1.1-4-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:9e74fd441d1f3d917d3303e319f61b82cdbd96b9a5ba919377a6eef1504a1e2b"}, + {file = "google_re2-1.1-4-cp310-cp310-macosx_13_0_x86_64.whl", hash = "sha256:734a2e7a4541c57253b5ebee24f3f3366ba3658bcad01da25fb623c78723471a"}, + {file = "google_re2-1.1-4-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:d88d5eecbc908abe16132456fae13690d0508f3ac5777f320ef95cb6cab9a961"}, + {file = "google_re2-1.1-4-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:b91db80b171ecec435a07977a227757dd487356701a32f556fa6fca5d0a40522"}, + {file = "google_re2-1.1-4-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b23129887a64bb9948af14c84705273ed1a40054e99433b4acccab4dcf6a226"}, + {file = "google_re2-1.1-4-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5dc1a0cc7cd19261dcaf76763e2499305dbb7e51dc69555167cdb8af98782698"}, + {file = "google_re2-1.1-4-cp310-cp310-win32.whl", hash = "sha256:3b2ab1e2420b5dd9743a2d6bc61b64e5f708563702a75b6db86637837eaeaf2f"}, + {file = "google_re2-1.1-4-cp310-cp310-win_amd64.whl", hash = "sha256:92efca1a7ef83b6df012d432a1cbc71d10ff42200640c0f9a5ff5b343a48e633"}, + {file = "google_re2-1.1-4-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:854818fd4ce79787aca5ba459d6e5abe4ca9be2c684a5b06a7f1757452ca3708"}, + {file = "google_re2-1.1-4-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:4ceef51174b6f653b6659a8fdaa9c38960c5228b44b25be2a3bcd8566827554f"}, + {file = "google_re2-1.1-4-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:ee49087c3db7e6f5238105ab5299c09e9b77516fe8cfb0a37e5f1e813d76ecb8"}, + {file = "google_re2-1.1-4-cp311-cp311-macosx_13_0_x86_64.whl", hash = "sha256:dc2312854bdc01410acc5d935f1906a49cb1f28980341c20a68797ad89d8e178"}, + {file = "google_re2-1.1-4-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:0dc0d2e42296fa84a3cb3e1bd667c6969389cd5cdf0786e6b1f911ae2d75375b"}, + {file = "google_re2-1.1-4-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:6bf04ced98453b035f84320f348f67578024f44d2997498def149054eb860ae8"}, + {file = "google_re2-1.1-4-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1d6b6ef11dc4ab322fa66c2f3561925f2b5372a879c3ed764d20e939e2fd3e5f"}, + {file = "google_re2-1.1-4-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0dcde6646fa9a97fd3692b3f6ae7daf7f3277d7500b6c253badeefa11db8956a"}, + {file = "google_re2-1.1-4-cp311-cp311-win32.whl", hash = "sha256:5f4f0229deb057348893574d5b0a96d055abebac6debf29d95b0c0e26524c9f6"}, + {file = "google_re2-1.1-4-cp311-cp311-win_amd64.whl", hash = "sha256:4713ddbe48a18875270b36a462b0eada5e84d6826f8df7edd328d8706b6f9d07"}, + {file = "google_re2-1.1-4-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:40a698300b8faddbb325662973f839489c89b960087060bd389c376828978a04"}, + {file = "google_re2-1.1-4-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:103d2d7ac92ba23911a151fd1fc7035cbf6dc92a7f6aea92270ebceb5cd5acd3"}, + {file = "google_re2-1.1-4-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:51fb7182bccab05e8258a2b6a63dda1a6b4a9e8dfb9b03ec50e50c49c2827dd4"}, + {file = "google_re2-1.1-4-cp312-cp312-macosx_13_0_x86_64.whl", hash = "sha256:65383022abd63d7b620221eba7935132b53244b8b463d8fdce498c93cf58b7b7"}, + {file = 
"google_re2-1.1-4-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:396281fc68a9337157b3ffcd9392c6b7fcb8aab43e5bdab496262a81d56a4ecc"}, + {file = "google_re2-1.1-4-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:8198adcfcff1c680e052044124621730fc48d08005f90a75487f5651f1ebfce2"}, + {file = "google_re2-1.1-4-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:81f7bff07c448aec4db9ca453d2126ece8710dbd9278b8bb09642045d3402a96"}, + {file = "google_re2-1.1-4-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7dacf730fd7d6ec71b11d6404b0b26e230814bfc8e9bb0d3f13bec9b5531f8d"}, + {file = "google_re2-1.1-4-cp312-cp312-win32.whl", hash = "sha256:8c764f62f4b1d89d1ef264853b6dd9fee14a89e9b86a81bc2157fe3531425eb4"}, + {file = "google_re2-1.1-4-cp312-cp312-win_amd64.whl", hash = "sha256:0be2666df4bc5381a5d693585f9bbfefb0bfd3c07530d7e403f181f5de47254a"}, + {file = "google_re2-1.1-4-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:5cb1b63a0bfd8dd65d39d2f3b2e5ae0a06ce4b2ce5818a1d1fc78a786a252673"}, + {file = "google_re2-1.1-4-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:e41751ce6b67a95230edd0772226dc94c2952a2909674cd69df9804ed0125307"}, + {file = "google_re2-1.1-4-cp38-cp38-macosx_13_0_arm64.whl", hash = "sha256:b998cfa2d50bf4c063e777c999a7e8645ec7e5d7baf43ad71b1e2e10bb0300c3"}, + {file = "google_re2-1.1-4-cp38-cp38-macosx_13_0_x86_64.whl", hash = "sha256:226ca3b0c2e970f3fc82001ac89e845ecc7a4bb7c68583e7a76cda70b61251a7"}, + {file = "google_re2-1.1-4-cp38-cp38-macosx_14_0_arm64.whl", hash = "sha256:9adec1f734ebad7c72e56c85f205a281d8fe9bf6583bc21020157d3f2812ce89"}, + {file = "google_re2-1.1-4-cp38-cp38-macosx_14_0_x86_64.whl", hash = "sha256:9c34f3c64ba566af967d29e11299560e6fdfacd8ca695120a7062b6ed993b179"}, + {file = "google_re2-1.1-4-cp38-cp38-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e1b85385fe293838e0d0b6e19e6c48ba8c6f739ea92ce2e23b718afe7b343363"}, + {file = "google_re2-1.1-4-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4694daa8a8987cfb568847aa872f9990e930c91a68c892ead876411d4b9012c3"}, + {file = "google_re2-1.1-4-cp38-cp38-win32.whl", hash = "sha256:5e671e9be1668187e2995aac378de574fa40df70bb6f04657af4d30a79274ce0"}, + {file = "google_re2-1.1-4-cp38-cp38-win_amd64.whl", hash = "sha256:f66c164d6049a8299f6dfcfa52d1580576b4b9724d6fcdad2f36f8f5da9304b6"}, + {file = "google_re2-1.1-4-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:25cb17ae0993a48c70596f3a3ef5d659638106401cc8193f51c0d7961b3b3eb7"}, + {file = "google_re2-1.1-4-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:5f101f86d14ca94ca4dcf63cceaa73d351f2be2481fcaa29d9e68eeab0dc2a88"}, + {file = "google_re2-1.1-4-cp39-cp39-macosx_13_0_arm64.whl", hash = "sha256:4e82591e85bf262a6d74cff152867e05fc97867c68ba81d6836ff8b0e7e62365"}, + {file = "google_re2-1.1-4-cp39-cp39-macosx_13_0_x86_64.whl", hash = "sha256:1f61c09b93ffd34b1e2557e5a9565039f935407a5786dbad46f64f1a484166e6"}, + {file = "google_re2-1.1-4-cp39-cp39-macosx_14_0_arm64.whl", hash = "sha256:12b390ad8c7e74bab068732f774e75e0680dade6469b249a721f3432f90edfc3"}, + {file = "google_re2-1.1-4-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:1284343eb31c2e82ed2d8159f33ba6842238a56782c881b07845a6d85613b055"}, + {file = "google_re2-1.1-4-cp39-cp39-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6c7b38e0daf2c06e4d3163f4c732ab3ad2521aecfed6605b69e4482c612da303"}, + {file = "google_re2-1.1-4-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:1f4d4f0823e8b2f6952a145295b1ff25245ce9bb136aff6fe86452e507d4c1dd"}, + {file = "google_re2-1.1-4-cp39-cp39-win32.whl", hash = "sha256:1afae56b2a07bb48cfcfefaa15ed85bae26a68f5dc7f9e128e6e6ea36914e847"}, + {file = "google_re2-1.1-4-cp39-cp39-win_amd64.whl", hash = "sha256:aa7d6d05911ab9c8adbf3c225a7a120ab50fd2784ac48f2f0d140c0b7afc2b55"}, + {file = "google_re2-1.1-5-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:222fc2ee0e40522de0b21ad3bc90ab8983be3bf3cec3d349c80d76c8bb1a4beb"}, + {file = "google_re2-1.1-5-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:d4763b0b9195b72132a4e7de8e5a9bf1f05542f442a9115aa27cfc2a8004f581"}, + {file = "google_re2-1.1-5-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:209649da10c9d4a93d8a4d100ecbf9cc3b0252169426bec3e8b4ad7e57d600cf"}, + {file = "google_re2-1.1-5-cp310-cp310-macosx_13_0_x86_64.whl", hash = "sha256:68813aa333c1604a2df4a495b2a6ed065d7c8aebf26cc7e7abb5a6835d08353c"}, + {file = "google_re2-1.1-5-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:370a23ec775ad14e9d1e71474d56f381224dcf3e72b15d8ca7b4ad7dd9cd5853"}, + {file = "google_re2-1.1-5-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:14664a66a3ddf6bc9e56f401bf029db2d169982c53eff3f5876399104df0e9a6"}, + {file = "google_re2-1.1-5-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3ea3722cc4932cbcebd553b69dce1b4a73572823cff4e6a244f1c855da21d511"}, + {file = "google_re2-1.1-5-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e14bb264c40fd7c627ef5678e295370cd6ba95ca71d835798b6e37502fc4c690"}, + {file = "google_re2-1.1-5-cp310-cp310-win32.whl", hash = "sha256:39512cd0151ea4b3969c992579c79b423018b464624ae955be685fc07d94556c"}, + {file = "google_re2-1.1-5-cp310-cp310-win_amd64.whl", hash = "sha256:ac66537aa3bc5504320d922b73156909e3c2b6da19739c866502f7827b3f9fdf"}, + {file = "google_re2-1.1-5-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:5b5ea68d54890c9edb1b930dcb2658819354e5d3f2201f811798bbc0a142c2b4"}, + {file = "google_re2-1.1-5-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:33443511b6b83c35242370908efe2e8e1e7cae749c766b2b247bf30e8616066c"}, + {file = "google_re2-1.1-5-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:413d77bdd5ba0bfcada428b4c146e87707452ec50a4091ec8e8ba1413d7e0619"}, + {file = "google_re2-1.1-5-cp311-cp311-macosx_13_0_x86_64.whl", hash = "sha256:5171686e43304996a34baa2abcee6f28b169806d0e583c16d55e5656b092a414"}, + {file = "google_re2-1.1-5-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:3b284db130283771558e31a02d8eb8fb756156ab98ce80035ae2e9e3a5f307c4"}, + {file = "google_re2-1.1-5-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:296e6aed0b169648dc4b870ff47bd34c702a32600adb9926154569ef51033f47"}, + {file = "google_re2-1.1-5-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:38d50e68ead374160b1e656bbb5d101f0b95fb4cc57f4a5c12100155001480c5"}, + {file = "google_re2-1.1-5-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2a0416a35921e5041758948bcb882456916f22845f66a93bc25070ef7262b72a"}, + {file = "google_re2-1.1-5-cp311-cp311-win32.whl", hash = "sha256:a1d59568bbb5de5dd56dd6cdc79907db26cce63eb4429260300c65f43469e3e7"}, + {file = "google_re2-1.1-5-cp311-cp311-win_amd64.whl", hash = "sha256:72f5a2f179648b8358737b2b493549370debd7d389884a54d331619b285514e3"}, + {file = "google_re2-1.1-5-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:cbc72c45937b1dc5acac3560eb1720007dccca7c9879138ff874c7f6baf96005"}, + {file = 
"google_re2-1.1-5-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:5fadd1417fbef7235fa9453dba4eb102e6e7d94b1e4c99d5fa3dd4e288d0d2ae"}, + {file = "google_re2-1.1-5-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:040f85c63cc02696485b59b187a5ef044abe2f99b92b4fb399de40b7d2904ccc"}, + {file = "google_re2-1.1-5-cp312-cp312-macosx_13_0_x86_64.whl", hash = "sha256:64e3b975ee6d9bbb2420494e41f929c1a0de4bcc16d86619ab7a87f6ea80d6bd"}, + {file = "google_re2-1.1-5-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:8ee370413e00f4d828eaed0e83b8af84d7a72e8ee4f4bd5d3078bc741dfc430a"}, + {file = "google_re2-1.1-5-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:5b89383001079323f693ba592d7aad789d7a02e75adb5d3368d92b300f5963fd"}, + {file = "google_re2-1.1-5-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:63cb4fdfbbda16ae31b41a6388ea621510db82feb8217a74bf36552ecfcd50ad"}, + {file = "google_re2-1.1-5-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9ebedd84ae8be10b7a71a16162376fd67a2386fe6361ef88c622dcf7fd679daf"}, + {file = "google_re2-1.1-5-cp312-cp312-win32.whl", hash = "sha256:c8e22d1692bc2c81173330c721aff53e47ffd3c4403ff0cd9d91adfd255dd150"}, + {file = "google_re2-1.1-5-cp312-cp312-win_amd64.whl", hash = "sha256:5197a6af438bb8c4abda0bbe9c4fbd6c27c159855b211098b29d51b73e4cbcf6"}, + {file = "google_re2-1.1-5-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:b6727e0b98417e114b92688ad2aa256102ece51f29b743db3d831df53faf1ce3"}, + {file = "google_re2-1.1-5-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:711e2b6417eb579c61a4951029d844f6b95b9b373b213232efd413659889a363"}, + {file = "google_re2-1.1-5-cp38-cp38-macosx_13_0_arm64.whl", hash = "sha256:71ae8b3df22c5c154c8af0f0e99d234a450ef1644393bc2d7f53fc8c0a1e111c"}, + {file = "google_re2-1.1-5-cp38-cp38-macosx_13_0_x86_64.whl", hash = "sha256:94a04e214bc521a3807c217d50cf099bbdd0c0a80d2d996c0741dbb995b5f49f"}, + {file = "google_re2-1.1-5-cp38-cp38-macosx_14_0_arm64.whl", hash = "sha256:a770f75358508a9110c81a1257721f70c15d9bb592a2fb5c25ecbd13566e52a5"}, + {file = "google_re2-1.1-5-cp38-cp38-macosx_14_0_x86_64.whl", hash = "sha256:07c9133357f7e0b17c6694d5dcb82e0371f695d7c25faef2ff8117ef375343ff"}, + {file = "google_re2-1.1-5-cp38-cp38-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:204ca6b1cf2021548f4a9c29ac015e0a4ab0a7b6582bf2183d838132b60c8fda"}, + {file = "google_re2-1.1-5-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f0b95857c2c654f419ca684ec38c9c3325c24e6ba7d11910a5110775a557bb18"}, + {file = "google_re2-1.1-5-cp38-cp38-win32.whl", hash = "sha256:347ac770e091a0364e822220f8d26ab53e6fdcdeaec635052000845c5a3fb869"}, + {file = "google_re2-1.1-5-cp38-cp38-win_amd64.whl", hash = "sha256:ec32bb6de7ffb112a07d210cf9f797b7600645c2d5910703fa07f456dd2150e0"}, + {file = "google_re2-1.1-5-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:eb5adf89060f81c5ff26c28e261e6b4997530a923a6093c9726b8dec02a9a326"}, + {file = "google_re2-1.1-5-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:a22630c9dd9ceb41ca4316bccba2643a8b1d5c198f21c00ed5b50a94313aaf10"}, + {file = "google_re2-1.1-5-cp39-cp39-macosx_13_0_arm64.whl", hash = "sha256:544dc17fcc2d43ec05f317366375796351dec44058e1164e03c3f7d050284d58"}, + {file = "google_re2-1.1-5-cp39-cp39-macosx_13_0_x86_64.whl", hash = "sha256:19710af5ea88751c7768575b23765ce0dfef7324d2539de576f75cdc319d6654"}, + {file = "google_re2-1.1-5-cp39-cp39-macosx_14_0_arm64.whl", hash = 
"sha256:f82995a205e08ad896f4bd5ce4847c834fab877e1772a44e5f262a647d8a1dec"}, + {file = "google_re2-1.1-5-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:63533c4d58da9dc4bc040250f1f52b089911699f0368e0e6e15f996387a984ed"}, + {file = "google_re2-1.1-5-cp39-cp39-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:79e00fcf0cb04ea35a22b9014712d448725ce4ddc9f08cc818322566176ca4b0"}, + {file = "google_re2-1.1-5-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bc41afcefee2da6c4ed883a93d7f527c4b960cd1d26bbb0020a7b8c2d341a60a"}, + {file = "google_re2-1.1-5-cp39-cp39-win32.whl", hash = "sha256:486730b5e1f1c31b0abc6d80abe174ce4f1188fe17d1b50698f2bf79dc6e44be"}, + {file = "google_re2-1.1-5-cp39-cp39-win_amd64.whl", hash = "sha256:4de637ca328f1d23209e80967d1b987d6b352cd01b3a52a84b4d742c69c3da6c"}, ] [[package]] @@ -4435,10 +4643,13 @@ files = [ {file = "lxml-4.9.3-cp27-cp27m-macosx_11_0_x86_64.whl", hash = "sha256:b0a545b46b526d418eb91754565ba5b63b1c0b12f9bd2f808c852d9b4b2f9b5c"}, {file = "lxml-4.9.3-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:075b731ddd9e7f68ad24c635374211376aa05a281673ede86cbe1d1b3455279d"}, {file = "lxml-4.9.3-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1e224d5755dba2f4a9498e150c43792392ac9b5380aa1b845f98a1618c94eeef"}, + {file = "lxml-4.9.3-cp27-cp27m-win32.whl", hash = "sha256:2c74524e179f2ad6d2a4f7caf70e2d96639c0954c943ad601a9e146c76408ed7"}, + {file = "lxml-4.9.3-cp27-cp27m-win_amd64.whl", hash = "sha256:4f1026bc732b6a7f96369f7bfe1a4f2290fb34dce00d8644bc3036fb351a4ca1"}, {file = "lxml-4.9.3-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c0781a98ff5e6586926293e59480b64ddd46282953203c76ae15dbbbf302e8bb"}, {file = "lxml-4.9.3-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:cef2502e7e8a96fe5ad686d60b49e1ab03e438bd9123987994528febd569868e"}, {file = "lxml-4.9.3-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:b86164d2cff4d3aaa1f04a14685cbc072efd0b4f99ca5708b2ad1b9b5988a991"}, {file = "lxml-4.9.3-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:42871176e7896d5d45138f6d28751053c711ed4d48d8e30b498da155af39aebd"}, + {file = "lxml-4.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:ae8b9c6deb1e634ba4f1930eb67ef6e6bf6a44b6eb5ad605642b2d6d5ed9ce3c"}, {file = "lxml-4.9.3-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:411007c0d88188d9f621b11d252cce90c4a2d1a49db6c068e3c16422f306eab8"}, {file = "lxml-4.9.3-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:cd47b4a0d41d2afa3e58e5bf1f62069255aa2fd6ff5ee41604418ca925911d76"}, {file = "lxml-4.9.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0e2cb47860da1f7e9a5256254b74ae331687b9672dfa780eed355c4c9c3dbd23"}, @@ -4447,6 +4658,7 @@ files = [ {file = "lxml-4.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:97047f0d25cd4bcae81f9ec9dc290ca3e15927c192df17331b53bebe0e3ff96d"}, {file = "lxml-4.9.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:1f447ea5429b54f9582d4b955f5f1985f278ce5cf169f72eea8afd9502973dd5"}, {file = "lxml-4.9.3-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:57d6ba0ca2b0c462f339640d22882acc711de224d769edf29962b09f77129cbf"}, + {file = "lxml-4.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:9767e79108424fb6c3edf8f81e6730666a50feb01a328f4a016464a5893f835a"}, {file = 
"lxml-4.9.3-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:71c52db65e4b56b8ddc5bb89fb2e66c558ed9d1a74a45ceb7dcb20c191c3df2f"}, {file = "lxml-4.9.3-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d73d8ecf8ecf10a3bd007f2192725a34bd62898e8da27eb9d32a58084f93962b"}, {file = "lxml-4.9.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0a3d3487f07c1d7f150894c238299934a2a074ef590b583103a45002035be120"}, @@ -4466,6 +4678,7 @@ files = [ {file = "lxml-4.9.3-cp36-cp36m-macosx_11_0_x86_64.whl", hash = "sha256:64f479d719dc9f4c813ad9bb6b28f8390360660b73b2e4beb4cb0ae7104f1c12"}, {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:dd708cf4ee4408cf46a48b108fb9427bfa00b9b85812a9262b5c668af2533ea5"}, {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c31c7462abdf8f2ac0577d9f05279727e698f97ecbb02f17939ea99ae8daa98"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e3cd95e10c2610c360154afdc2f1480aea394f4a4f1ea0a5eacce49640c9b190"}, {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:4930be26af26ac545c3dffb662521d4e6268352866956672231887d18f0eaab2"}, {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4aec80cde9197340bc353d2768e2a75f5f60bacda2bab72ab1dc499589b3878c"}, {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:14e019fd83b831b2e61baed40cab76222139926b1fb5ed0e79225bc0cae14584"}, @@ -4475,6 +4688,7 @@ files = [ {file = "lxml-4.9.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bef4e656f7d98aaa3486d2627e7d2df1157d7e88e7efd43a65aa5dd4714916cf"}, {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:46f409a2d60f634fe550f7133ed30ad5321ae2e6630f13657fb9479506b00601"}, {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:4c28a9144688aef80d6ea666c809b4b0e50010a2aca784c97f5e6bf143d9f129"}, + {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:141f1d1a9b663c679dc524af3ea1773e618907e96075262726c7612c02b149a4"}, {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:53ace1c1fd5a74ef662f844a0413446c0629d151055340e9893da958a374f70d"}, {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:17a753023436a18e27dd7769e798ce302963c236bc4114ceee5b25c18c52c693"}, {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7d298a1bd60c067ea75d9f684f5f3992c9d6766fadbc0bcedd39750bf344c2f4"}, @@ -4484,6 +4698,7 @@ files = [ {file = "lxml-4.9.3-cp37-cp37m-win_amd64.whl", hash = "sha256:120fa9349a24c7043854c53cae8cec227e1f79195a7493e09e0c12e29f918e52"}, {file = "lxml-4.9.3-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4d2d1edbca80b510443f51afd8496be95529db04a509bc8faee49c7b0fb6d2cc"}, {file = "lxml-4.9.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8d7e43bd40f65f7d97ad8ef5c9b1778943d02f04febef12def25f7583d19baac"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:71d66ee82e7417828af6ecd7db817913cb0cf9d4e61aa0ac1fde0583d84358db"}, {file = "lxml-4.9.3-cp38-cp38-manylinux_2_28_x86_64.whl", hash = 
"sha256:6fc3c450eaa0b56f815c7b62f2b7fba7266c4779adcf1cece9e6deb1de7305ce"}, {file = "lxml-4.9.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:65299ea57d82fb91c7f019300d24050c4ddeb7c5a190e076b5f48a2b43d19c42"}, {file = "lxml-4.9.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:eadfbbbfb41b44034a4c757fd5d70baccd43296fb894dba0295606a7cf3124aa"}, @@ -4493,6 +4708,7 @@ files = [ {file = "lxml-4.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:92af161ecbdb2883c4593d5ed4815ea71b31fafd7fd05789b23100d081ecac96"}, {file = "lxml-4.9.3-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:9bb6ad405121241e99a86efff22d3ef469024ce22875a7ae045896ad23ba2340"}, {file = "lxml-4.9.3-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8ed74706b26ad100433da4b9d807eae371efaa266ffc3e9191ea436087a9d6a7"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fbf521479bcac1e25a663df882c46a641a9bff6b56dc8b0fafaebd2f66fb231b"}, {file = "lxml-4.9.3-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:303bf1edce6ced16bf67a18a1cf8339d0db79577eec5d9a6d4a80f0fb10aa2da"}, {file = "lxml-4.9.3-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:5515edd2a6d1a5a70bfcdee23b42ec33425e405c5b351478ab7dc9347228f96e"}, {file = "lxml-4.9.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:690dafd0b187ed38583a648076865d8c229661ed20e48f2335d68e2cf7dc829d"}, @@ -4503,13 +4719,16 @@ files = [ {file = "lxml-4.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:4dd9a263e845a72eacb60d12401e37c616438ea2e5442885f65082c276dfb2b2"}, {file = "lxml-4.9.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6689a3d7fd13dc687e9102a27e98ef33730ac4fe37795d5036d18b4d527abd35"}, {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:f6bdac493b949141b733c5345b6ba8f87a226029cbabc7e9e121a413e49441e0"}, + {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:05186a0f1346ae12553d66df1cfce6f251589fea3ad3da4f3ef4e34b2d58c6a3"}, {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c2006f5c8d28dee289f7020f721354362fa304acbaaf9745751ac4006650254b"}, {file = "lxml-4.9.3-pp38-pypy38_pp73-macosx_11_0_x86_64.whl", hash = "sha256:5c245b783db29c4e4fbbbfc9c5a78be496c9fea25517f90606aa1f6b2b3d5f7b"}, {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4fb960a632a49f2f089d522f70496640fdf1218f1243889da3822e0a9f5f3ba7"}, + {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:50670615eaf97227d5dc60de2dc99fb134a7130d310d783314e7724bf163f75d"}, {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9719fe17307a9e814580af1f5c6e05ca593b12fb7e44fe62450a5384dbf61b4b"}, {file = "lxml-4.9.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:3331bece23c9ee066e0fb3f96c61322b9e0f54d775fccefff4c38ca488de283a"}, {file = "lxml-4.9.3-pp39-pypy39_pp73-macosx_11_0_x86_64.whl", hash = "sha256:ed667f49b11360951e201453fc3967344d0d0263aa415e1619e85ae7fd17b4e0"}, {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8b77946fd508cbf0fccd8e400a7f71d4ac0e1595812e66025bac475a8e811694"}, + {file = 
"lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e4da8ca0c0c0aea88fd46be8e44bd49716772358d648cce45fe387f7b92374a7"}, {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fe4bda6bd4340caa6e5cf95e73f8fea5c4bfc55763dd42f1b50a94c1b4a2fbd4"}, {file = "lxml-4.9.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:f3df3db1d336b9356dd3112eae5f5c2b8b377f3bc826848567f10bfddfee77e9"}, {file = "lxml-4.9.3.tar.gz", hash = "sha256:48628bd53a426c9eb9bc066a923acaa0878d1e86129fd5359aee99285f4eed9c"}, @@ -4670,6 +4889,16 @@ files = [ {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, @@ -6636,6 +6865,7 @@ files = [ {file = "pymongo-4.6.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ab6bcc8e424e07c1d4ba6df96f7fb963bcb48f590b9456de9ebd03b88084fe8"}, {file = "pymongo-4.6.0-cp312-cp312-win32.whl", hash = "sha256:47aa128be2e66abd9d1a9b0437c62499d812d291f17b55185cb4aa33a5f710a4"}, {file = "pymongo-4.6.0-cp312-cp312-win_amd64.whl", hash = 
"sha256:014e7049dd019a6663747ca7dae328943e14f7261f7c1381045dfc26a04fa330"}, + {file = "pymongo-4.6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e24025625bad66895b1bc3ae1647f48f0a92dd014108fb1be404c77f0b69ca67"}, {file = "pymongo-4.6.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:288c21ab9531b037f7efa4e467b33176bc73a0c27223c141b822ab4a0e66ff2a"}, {file = "pymongo-4.6.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:747c84f4e690fbe6999c90ac97246c95d31460d890510e4a3fa61b7d2b87aa34"}, {file = "pymongo-4.6.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:055f5c266e2767a88bb585d01137d9c7f778b0195d3dbf4a487ef0638be9b651"}, @@ -7076,6 +7306,7 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -7083,8 +7314,16 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = 
"sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -7101,6 +7340,7 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -7108,6 +7348,7 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -7995,6 +8236,7 @@ files = [ {file = "SQLAlchemy-1.4.49-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:03db81b89fe7ef3857b4a00b63dedd632d6183d4ea5a31c5d8a92e000a41fc71"}, {file = "SQLAlchemy-1.4.49-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:95b9df9afd680b7a3b13b38adf6e3a38995da5e162cc7524ef08e3be4e5ed3e1"}, {file = "SQLAlchemy-1.4.49-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a63e43bf3f668c11bb0444ce6e809c1227b8f067ca1068898f3008a273f52b09"}, + {file = "SQLAlchemy-1.4.49-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:ca46de16650d143a928d10842939dab208e8d8c3a9a8757600cae9b7c579c5cd"}, {file = "SQLAlchemy-1.4.49-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f835c050ebaa4e48b18403bed2c0fda986525896efd76c245bdd4db995e51a4c"}, {file = "SQLAlchemy-1.4.49-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c21b172dfb22e0db303ff6419451f0cac891d2e911bb9fbf8003d717f1bcf91"}, {file = "SQLAlchemy-1.4.49-cp310-cp310-win32.whl", hash = "sha256:5fb1ebdfc8373b5a291485757bd6431de8d7ed42c27439f543c81f6c8febd729"}, @@ -8004,26 +8246,35 @@ files = [ {file = "SQLAlchemy-1.4.49-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5debe7d49b8acf1f3035317e63d9ec8d5e4d904c6e75a2a9246a119f5f2fdf3d"}, {file = "SQLAlchemy-1.4.49-cp311-cp311-win32.whl", hash = "sha256:82b08e82da3756765c2e75f327b9bf6b0f043c9c3925fb95fb51e1567fa4ee87"}, {file = "SQLAlchemy-1.4.49-cp311-cp311-win_amd64.whl", hash = "sha256:171e04eeb5d1c0d96a544caf982621a1711d078dbc5c96f11d6469169bd003f1"}, + {file = "SQLAlchemy-1.4.49-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f23755c384c2969ca2f7667a83f7c5648fcf8b62a3f2bbd883d805454964a800"}, + {file = "SQLAlchemy-1.4.49-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8396e896e08e37032e87e7fbf4a15f431aa878c286dc7f79e616c2feacdb366c"}, + {file = "SQLAlchemy-1.4.49-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66da9627cfcc43bbdebd47bfe0145bb662041472393c03b7802253993b6b7c90"}, + {file = "SQLAlchemy-1.4.49-cp312-cp312-win32.whl", hash = "sha256:9a06e046ffeb8a484279e54bda0a5abfd9675f594a2e38ef3133d7e4d75b6214"}, + {file = "SQLAlchemy-1.4.49-cp312-cp312-win_amd64.whl", hash = "sha256:7cf8b90ad84ad3a45098b1c9f56f2b161601e4670827d6b892ea0e884569bd1d"}, {file = "SQLAlchemy-1.4.49-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:36e58f8c4fe43984384e3fbe6341ac99b6b4e083de2fe838f0fdb91cebe9e9cb"}, {file = "SQLAlchemy-1.4.49-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b31e67ff419013f99ad6f8fc73ee19ea31585e1e9fe773744c0f3ce58c039c30"}, + {file = "SQLAlchemy-1.4.49-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ebc22807a7e161c0d8f3da34018ab7c97ef6223578fcdd99b1d3e7ed1100a5db"}, {file = "SQLAlchemy-1.4.49-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c14b29d9e1529f99efd550cd04dbb6db6ba5d690abb96d52de2bff4ed518bc95"}, {file = "SQLAlchemy-1.4.49-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c40f3470e084d31247aea228aa1c39bbc0904c2b9ccbf5d3cfa2ea2dac06f26d"}, {file = "SQLAlchemy-1.4.49-cp36-cp36m-win32.whl", hash = "sha256:706bfa02157b97c136547c406f263e4c6274a7b061b3eb9742915dd774bbc264"}, {file = "SQLAlchemy-1.4.49-cp36-cp36m-win_amd64.whl", hash = "sha256:a7f7b5c07ae5c0cfd24c2db86071fb2a3d947da7bd487e359cc91e67ac1c6d2e"}, {file = "SQLAlchemy-1.4.49-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:4afbbf5ef41ac18e02c8dc1f86c04b22b7a2125f2a030e25bbb4aff31abb224b"}, {file = "SQLAlchemy-1.4.49-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:24e300c0c2147484a002b175f4e1361f102e82c345bf263242f0449672a4bccf"}, + {file = 
"SQLAlchemy-1.4.49-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:393cd06c3b00b57f5421e2133e088df9cabcececcea180327e43b937b5a7caa5"}, {file = "SQLAlchemy-1.4.49-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:201de072b818f8ad55c80d18d1a788729cccf9be6d9dc3b9d8613b053cd4836d"}, {file = "SQLAlchemy-1.4.49-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7653ed6817c710d0c95558232aba799307d14ae084cc9b1f4c389157ec50df5c"}, {file = "SQLAlchemy-1.4.49-cp37-cp37m-win32.whl", hash = "sha256:647e0b309cb4512b1f1b78471fdaf72921b6fa6e750b9f891e09c6e2f0e5326f"}, {file = "SQLAlchemy-1.4.49-cp37-cp37m-win_amd64.whl", hash = "sha256:ab73ed1a05ff539afc4a7f8cf371764cdf79768ecb7d2ec691e3ff89abbc541e"}, {file = "SQLAlchemy-1.4.49-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:37ce517c011560d68f1ffb28af65d7e06f873f191eb3a73af5671e9c3fada08a"}, {file = "SQLAlchemy-1.4.49-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1878ce508edea4a879015ab5215546c444233881301e97ca16fe251e89f1c55"}, + {file = "SQLAlchemy-1.4.49-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95ab792ca493891d7a45a077e35b418f68435efb3e1706cb8155e20e86a9013c"}, {file = "SQLAlchemy-1.4.49-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0e8e608983e6f85d0852ca61f97e521b62e67969e6e640fe6c6b575d4db68557"}, {file = "SQLAlchemy-1.4.49-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ccf956da45290df6e809ea12c54c02ace7f8ff4d765d6d3dfb3655ee876ce58d"}, {file = "SQLAlchemy-1.4.49-cp38-cp38-win32.whl", hash = "sha256:f167c8175ab908ce48bd6550679cc6ea20ae169379e73c7720a28f89e53aa532"}, {file = "SQLAlchemy-1.4.49-cp38-cp38-win_amd64.whl", hash = "sha256:45806315aae81a0c202752558f0df52b42d11dd7ba0097bf71e253b4215f34f4"}, {file = "SQLAlchemy-1.4.49-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:b6d0c4b15d65087738a6e22e0ff461b407533ff65a73b818089efc8eb2b3e1de"}, {file = "SQLAlchemy-1.4.49-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a843e34abfd4c797018fd8d00ffffa99fd5184c421f190b6ca99def4087689bd"}, + {file = "SQLAlchemy-1.4.49-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:738d7321212941ab19ba2acf02a68b8ee64987b248ffa2101630e8fccb549e0d"}, {file = "SQLAlchemy-1.4.49-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1c890421651b45a681181301b3497e4d57c0d01dc001e10438a40e9a9c25ee77"}, {file = "SQLAlchemy-1.4.49-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d26f280b8f0a8f497bc10573849ad6dc62e671d2468826e5c748d04ed9e670d5"}, {file = "SQLAlchemy-1.4.49-cp39-cp39-win32.whl", hash = "sha256:ec2268de67f73b43320383947e74700e95c6770d0c68c4e615e9897e46296294"}, @@ -9132,4 +9383,4 @@ weaviate = ["weaviate-client"] [metadata] lock-version = "2.0" python-versions = ">=3.8.1,<3.13" -content-hash = "8f76bfb3e8eb515bfa0037e987347b3001b933fed22efc76969a3604ac139352" +content-hash = 
"85921eb9304af4af4126161624e3d5fb6945c1d5877b7d70903199d92639180d" From 1525e738caa46dc40e839b9c09753da74eade010 Mon Sep 17 00:00:00 2001 From: Dave Date: Fri, 19 Apr 2024 20:21:34 +0200 Subject: [PATCH 104/127] add support for scd2 --- dlt/common/data_writers/escape.py | 8 ++++++++ dlt/destinations/impl/clickhouse/__init__.py | 7 ++++++- dlt/destinations/impl/clickhouse/clickhouse.py | 4 ++++ dlt/destinations/sql_jobs.py | 6 +++++- tests/load/test_job_client.py | 3 ++- 5 files changed, 25 insertions(+), 3 deletions(-) diff --git a/dlt/common/data_writers/escape.py b/dlt/common/data_writers/escape.py index 32c0f009f0..cdbb378faa 100644 --- a/dlt/common/data_writers/escape.py +++ b/dlt/common/data_writers/escape.py @@ -213,3 +213,11 @@ def format_bigquery_datetime_literal( """Returns BigQuery-adjusted datetime literal by prefixing required `TIMESTAMP` indicator.""" # https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#timestamp_literals return "TIMESTAMP " + format_datetime_literal(v, precision, no_tz) + + +def format_clickhouse_datetime_literal( + v: pendulum.DateTime, precision: int = 6, no_tz: bool = False +) -> str: + """Returns clickhouse compatibel function""" + datetime = format_datetime_literal(v, precision, True) + return f"toDateTime64({datetime}, {precision}, '{v.tzinfo}')" diff --git a/dlt/destinations/impl/clickhouse/__init__.py b/dlt/destinations/impl/clickhouse/__init__.py index 427b4e84b5..852ebf9859 100644 --- a/dlt/destinations/impl/clickhouse/__init__.py +++ b/dlt/destinations/impl/clickhouse/__init__.py @@ -1,7 +1,11 @@ import sys from dlt.common.arithmetics import DEFAULT_NUMERIC_PRECISION, DEFAULT_NUMERIC_SCALE -from dlt.common.data_writers.escape import escape_clickhouse_identifier, escape_clickhouse_literal +from dlt.common.data_writers.escape import ( + escape_clickhouse_identifier, + escape_clickhouse_literal, + format_clickhouse_datetime_literal, +) from dlt.common.destination import DestinationCapabilitiesContext @@ -13,6 +17,7 @@ def capabilities() -> DestinationCapabilitiesContext: caps.preferred_staging_file_format = "jsonl" caps.supported_staging_file_formats = ["parquet", "jsonl"] + caps.format_datetime_literal = format_clickhouse_datetime_literal caps.escape_identifier = escape_clickhouse_identifier caps.escape_literal = escape_clickhouse_literal diff --git a/dlt/destinations/impl/clickhouse/clickhouse.py b/dlt/destinations/impl/clickhouse/clickhouse.py index 83e76870f0..c6637c25ef 100644 --- a/dlt/destinations/impl/clickhouse/clickhouse.py +++ b/dlt/destinations/impl/clickhouse/clickhouse.py @@ -278,6 +278,10 @@ def gen_key_table_clauses( f"FROM {root_table_name} AS d JOIN {staging_root_table_name} AS s ON {join_conditions}" ] + @classmethod + def gen_update_table_prefix(cls, table_name: str) -> str: + return f"ALTER TABLE {table_name} UPDATE" + class ClickHouseClient(SqlJobClientWithStaging, SupportsStagingDestination): capabilities: ClassVar[DestinationCapabilitiesContext] = capabilities() diff --git a/dlt/destinations/sql_jobs.py b/dlt/destinations/sql_jobs.py index 86eaa9236a..e156361926 100644 --- a/dlt/destinations/sql_jobs.py +++ b/dlt/destinations/sql_jobs.py @@ -541,7 +541,7 @@ def gen_scd2_sql( # retire updated and deleted records sql.append(f""" - UPDATE {root_table_name} SET {to} = {boundary_ts} + {cls.gen_update_table_prefix(root_table_name)} {to} = {boundary_ts} WHERE {to} = {active_record_ts} AND {hash_} NOT IN (SELECT {hash_} FROM {staging_root_table_name}); """) @@ -587,3 +587,7 @@ def gen_scd2_sql( WHERE NOT EXISTS 
(SELECT 1 FROM {table_name} AS f WHERE f.{unique_column} = s.{unique_column}); """) return sql + + @classmethod + def gen_update_table_prefix(cls, table_name: str) -> str: + return f"UPDATE {table_name} SET" diff --git a/tests/load/test_job_client.py b/tests/load/test_job_client.py index ffc05a5e3c..c120f95e22 100644 --- a/tests/load/test_job_client.py +++ b/tests/load/test_job_client.py @@ -116,6 +116,7 @@ def test_get_update_basic_schema(client: SqlJobClientBase) -> None: # update in storage client._update_schema_in_storage(schema) + sleep(1) this_schema = client.get_stored_schema_by_hash(schema.version_hash) newest_schema = client.get_stored_schema() assert this_schema == newest_schema @@ -129,7 +130,7 @@ def test_get_update_basic_schema(client: SqlJobClientBase) -> None: first_schema._bump_version() assert first_schema.version == this_schema.version == 2 # wait to make load_newest_schema deterministic - sleep(2) + sleep(1) client._update_schema_in_storage(first_schema) this_schema = client.get_stored_schema_by_hash(first_schema.version_hash) newest_schema = client.get_stored_schema() From 15855d739e668497eebfd760e1e881ca8f8bd220 Mon Sep 17 00:00:00 2001 From: Dave Date: Fri, 19 Apr 2024 20:38:32 +0200 Subject: [PATCH 105/127] add correct high_ts for clickhouse --- dlt/common/destination/capabilities.py | 4 ++++ dlt/destinations/impl/clickhouse/__init__.py | 2 ++ dlt/destinations/sql_jobs.py | 9 +++------ tests/load/pipeline/test_scd2.py | 12 ++++++------ 4 files changed, 15 insertions(+), 12 deletions(-) diff --git a/dlt/common/destination/capabilities.py b/dlt/common/destination/capabilities.py index 8432f8b544..9c633e6a20 100644 --- a/dlt/common/destination/capabilities.py +++ b/dlt/common/destination/capabilities.py @@ -21,6 +21,7 @@ # sql - any sql statement TLoaderFileFormat = Literal["jsonl", "typed-jsonl", "insert_values", "parquet", "csv"] ALL_SUPPORTED_FILE_FORMATS: Set[TLoaderFileFormat] = set(get_args(TLoaderFileFormat)) +HIGH_TS = pendulum.datetime(9999, 12, 31) @configspec @@ -53,6 +54,9 @@ class DestinationCapabilitiesContext(ContainerInjectableContext): insert_values_writer_type: str = "default" supports_multiple_statements: bool = True supports_clone_table: bool = False + scd2_high_timestamp: pendulum.DateTime = HIGH_TS + """High timestamp used to indicate active records in `scd2` merge strategy.""" + """Destination supports CREATE TABLE ... CLONE ... statements""" max_table_nesting: Optional[int] = None # destination can overwrite max table nesting diff --git a/dlt/destinations/impl/clickhouse/__init__.py b/dlt/destinations/impl/clickhouse/__init__.py index 852ebf9859..42c14b4216 100644 --- a/dlt/destinations/impl/clickhouse/__init__.py +++ b/dlt/destinations/impl/clickhouse/__init__.py @@ -1,5 +1,6 @@ import sys +from dlt.common.pendulum import pendulum from dlt.common.arithmetics import DEFAULT_NUMERIC_PRECISION, DEFAULT_NUMERIC_SCALE from dlt.common.data_writers.escape import ( escape_clickhouse_identifier, @@ -24,6 +25,7 @@ def capabilities() -> DestinationCapabilitiesContext: # https://stackoverflow.com/questions/68358686/what-is-the-maximum-length-of-a-column-in-clickhouse-can-it-be-modified caps.max_identifier_length = 255 caps.max_column_identifier_length = 255 + caps.scd2_high_timestamp = pendulum.datetime(2299, 12, 31) # this is the max datetime... # ClickHouse has no max `String` type length. 
caps.max_text_data_type_length = sys.maxsize diff --git a/dlt/destinations/sql_jobs.py b/dlt/destinations/sql_jobs.py index e156361926..4b7700bb4a 100644 --- a/dlt/destinations/sql_jobs.py +++ b/dlt/destinations/sql_jobs.py @@ -3,7 +3,6 @@ import yaml from dlt.common.logger import pretty_format_exception -from dlt.common.pendulum import pendulum from dlt.common.schema.typing import ( TTableSchema, TSortOrder, @@ -24,10 +23,6 @@ from dlt.pipeline.current import load_package as current_load_package -HIGH_TS = pendulum.datetime(9999, 12, 31) -"""High timestamp used to indicate active records in `scd2` merge strategy.""" - - class SqlJobParams(TypedDict, total=False): replace: Optional[bool] table_chain_create_table_statements: Dict[str, Sequence[str]] @@ -537,7 +532,9 @@ def gen_scd2_sql( current_load_package()["state"]["created_at"], caps.timestamp_precision, ) - active_record_ts = format_datetime_literal(HIGH_TS, caps.timestamp_precision) + active_record_ts = format_datetime_literal( + caps.scd2_high_timestamp, caps.timestamp_precision + ) # retire updated and deleted records sql.append(f""" diff --git a/tests/load/pipeline/test_scd2.py b/tests/load/pipeline/test_scd2.py index 65a0742195..a84dab4405 100644 --- a/tests/load/pipeline/test_scd2.py +++ b/tests/load/pipeline/test_scd2.py @@ -11,8 +11,6 @@ from dlt.common.normalizers.json.relational import DataItemNormalizer from dlt.common.normalizers.naming.snake_case import NamingConvention as SnakeCaseNamingConvention from dlt.common.time import ensure_pendulum_datetime, reduce_pendulum_datetime_precision -from dlt.common.typing import TDataItem -from dlt.destinations.sql_jobs import HIGH_TS from dlt.extract.resource import DltResource from dlt.pipeline.exceptions import PipelineStepFailed @@ -29,8 +27,9 @@ def get_active_ts(pipeline: dlt.Pipeline) -> datetime: + high_ts = pipeline.destination.capabilities().scd2_high_timestamp caps = pipeline._get_destination_capabilities() - active_ts = HIGH_TS.in_timezone(tz="UTC").replace(tzinfo=None) + active_ts = high_ts.in_timezone(tz="UTC").replace(tzinfo=None) return reduce_pendulum_datetime_precision(active_ts, caps.timestamp_precision) @@ -46,10 +45,10 @@ def get_load_package_created_at(pipeline: dlt.Pipeline, load_info: LoadInfo) -> return reduce_pendulum_datetime_precision(created_at, caps.timestamp_precision) -def strip_timezone(ts: datetime) -> datetime: +def strip_timezone(ts: datetime, high_ts: datetime) -> datetime: """Converts timezone of datetime object to UTC and removes timezone awareness.""" ts = ensure_pendulum_datetime(ts) - if ts.replace(tzinfo=None) == HIGH_TS: + if ts.replace(tzinfo=None) == high_ts: return ts.replace(tzinfo=None) else: return ts.astimezone(tz=timezone.utc).replace(tzinfo=None) @@ -59,10 +58,11 @@ def get_table( pipeline: dlt.Pipeline, table_name: str, sort_column: str, include_root_id: bool = True ) -> List[Dict[str, Any]]: """Returns destination table contents as list of dictionaries.""" + high_ts = pipeline.destination.capabilities().scd2_high_timestamp return sorted( [ { - k: strip_timezone(v) if isinstance(v, datetime) else v + k: strip_timezone(v, high_ts) if isinstance(v, datetime) else v for k, v in r.items() if not k.startswith("_dlt") or k in DEFAULT_VALIDITY_COLUMN_NAMES From 544a0a401754998eba833167871c5e2e16b9cacf Mon Sep 17 00:00:00 2001 From: Dave Date: Mon, 22 Apr 2024 15:29:24 +0200 Subject: [PATCH 106/127] remove corelated query from scd2 implementation --- dlt/destinations/sql_jobs.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) 
diff --git a/dlt/destinations/sql_jobs.py b/dlt/destinations/sql_jobs.py index 4b7700bb4a..4ee5abaec7 100644 --- a/dlt/destinations/sql_jobs.py +++ b/dlt/destinations/sql_jobs.py @@ -579,9 +579,11 @@ def gen_scd2_sql( staging_table_name = sql_client.make_qualified_table_name(table["name"]) sql.append(f""" INSERT INTO {table_name} - SELECT * + SELECT s.* FROM {staging_table_name} AS s - WHERE NOT EXISTS (SELECT 1 FROM {table_name} AS f WHERE f.{unique_column} = s.{unique_column}); + LEFT JOIN {table_name} as t + on s.{unique_column} = t.{unique_column} + WHERE t.{unique_column} IS NULL or t.{unique_column} = ''; """) return sql From cb9f35b591e15b50b3db7143bc0916d730d0127a Mon Sep 17 00:00:00 2001 From: Dave Date: Mon, 22 Apr 2024 17:25:44 +0200 Subject: [PATCH 107/127] fix merge sql for clickhouse --- dlt/destinations/impl/clickhouse/clickhouse.py | 11 ++++------- dlt/destinations/sql_jobs.py | 10 +++++++++- dlt/helpers/dbt/profiles.yml | 11 +++++++++++ tests/load/pipeline/test_merge_disposition.py | 13 +++++++++---- 4 files changed, 33 insertions(+), 12 deletions(-) diff --git a/dlt/destinations/impl/clickhouse/clickhouse.py b/dlt/destinations/impl/clickhouse/clickhouse.py index c6637c25ef..0eb5ea278d 100644 --- a/dlt/destinations/impl/clickhouse/clickhouse.py +++ b/dlt/destinations/impl/clickhouse/clickhouse.py @@ -266,13 +266,6 @@ def gen_key_table_clauses( key_clauses: Sequence[str], for_delete: bool, ) -> List[str]: - if for_delete: - # clickhouse doesn't support alias in DELETE FROM - return [ - f"FROM {root_table_name} WHERE EXISTS (SELECT 1 FROM" - f" {staging_root_table_name} WHERE" - f" {' OR '.join([c.format(d=root_table_name,s=staging_root_table_name) for c in key_clauses])})" - ] join_conditions = " AND ".join([c.format(d="d", s="s") for c in key_clauses]) return [ f"FROM {root_table_name} AS d JOIN {staging_root_table_name} AS s ON {join_conditions}" @@ -282,6 +275,10 @@ def gen_key_table_clauses( def gen_update_table_prefix(cls, table_name: str) -> str: return f"ALTER TABLE {table_name} UPDATE" + @classmethod + def supports_simple_merge_delete(cls) -> bool: + return False + class ClickHouseClient(SqlJobClientWithStaging, SupportsStagingDestination): capabilities: ClassVar[DestinationCapabilitiesContext] = capabilities() diff --git a/dlt/destinations/sql_jobs.py b/dlt/destinations/sql_jobs.py index 4ee5abaec7..c3425c8cbd 100644 --- a/dlt/destinations/sql_jobs.py +++ b/dlt/destinations/sql_jobs.py @@ -382,7 +382,7 @@ def gen_merge_sql( unique_column: str = None root_key_column: str = None - if len(table_chain) == 1: + if len(table_chain) == 1 and cls.supports_simple_merge_delete(): key_table_clauses = cls.gen_key_table_clauses( root_table_name, staging_root_table_name, key_clauses, for_delete=True ) @@ -590,3 +590,11 @@ def gen_scd2_sql( @classmethod def gen_update_table_prefix(cls, table_name: str) -> str: return f"UPDATE {table_name} SET" + + @classmethod + def supports_simple_merge_delete(cls) -> bool: + """this could also be a capabitiy, but probably it is better stored here + this identifies destinations that can have a simplified method for merging single + table table chains + """ + return True diff --git a/dlt/helpers/dbt/profiles.yml b/dlt/helpers/dbt/profiles.yml index 8f2ad22585..d56e90f949 100644 --- a/dlt/helpers/dbt/profiles.yml +++ b/dlt/helpers/dbt/profiles.yml @@ -171,3 +171,14 @@ databricks: http_path: "{{ env_var('DLT__CREDENTIALS__HTTP_PATH') }}" token: "{{ env_var('DLT__CREDENTIALS__ACCESS_TOKEN') }}" threads: 4 + + +clickhouse: + target: analytics + 
outputs: + analytics: + type: clickhouse + schema: "{{ var('destination_dataset_name', var('source_dataset_name')) }}" + host: "{{ env_var('DLT__CREDENTIALS__HOST') }}" + user: "{{ env_var('DLT__CREDENTIALS__USERNAME') }}" + password: "{{ env_var('DLT__CREDENTIALS__PASSWORD') }}" \ No newline at end of file diff --git a/tests/load/pipeline/test_merge_disposition.py b/tests/load/pipeline/test_merge_disposition.py index bfcdccfba4..d2978e105a 100644 --- a/tests/load/pipeline/test_merge_disposition.py +++ b/tests/load/pipeline/test_merge_disposition.py @@ -139,7 +139,7 @@ def test_merge_on_ad_hoc_primary_key(destination_config: DestinationTestConfigur @dlt.source(root_key=True) -def github(): +def github(remove_lists: bool = False): @dlt.resource( table_name="issues", write_disposition="merge", @@ -150,7 +150,12 @@ def load_issues(): with open( "tests/normalize/cases/github.issues.load_page_5_duck.json", "r", encoding="utf-8" ) as f: - yield from json.load(f) + for item in json.load(f): + # for clickhouse we cannot have lists in json fields + if remove_lists: + item.pop("assignees") + item.pop("labels") + yield item return load_issues @@ -212,7 +217,7 @@ def test_merge_source_compound_keys_and_changes( ) def test_merge_no_child_tables(destination_config: DestinationTestConfiguration) -> None: p = destination_config.setup_pipeline("github_3", full_refresh=True) - github_data = github() + github_data = github(True) assert github_data.max_table_nesting is None assert github_data.root_key is True # set max nesting to 0 so no child tables are generated @@ -231,7 +236,7 @@ def test_merge_no_child_tables(destination_config: DestinationTestConfiguration) assert github_1_counts["issues"] == 15 # load all - github_data = github() + github_data = github(True) github_data.max_table_nesting = 0 info = p.run(github_data, loader_file_format=destination_config.file_format) assert_load_info(info) From 4929bd148f84d50d1e4b1aca463130e921157a8c Mon Sep 17 00:00:00 2001 From: Dave Date: Mon, 22 Apr 2024 19:36:56 +0200 Subject: [PATCH 108/127] fix merge tests --- dlt/destinations/impl/clickhouse/clickhouse.py | 2 +- dlt/destinations/sql_jobs.py | 4 ++-- tests/load/pipeline/test_pipelines.py | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/dlt/destinations/impl/clickhouse/clickhouse.py b/dlt/destinations/impl/clickhouse/clickhouse.py index 0eb5ea278d..fbe660afc8 100644 --- a/dlt/destinations/impl/clickhouse/clickhouse.py +++ b/dlt/destinations/impl/clickhouse/clickhouse.py @@ -276,7 +276,7 @@ def gen_update_table_prefix(cls, table_name: str) -> str: return f"ALTER TABLE {table_name} UPDATE" @classmethod - def supports_simple_merge_delete(cls) -> bool: + def requires_temp_table_for_delete(cls) -> bool: return False diff --git a/dlt/destinations/sql_jobs.py b/dlt/destinations/sql_jobs.py index c3425c8cbd..e1be230767 100644 --- a/dlt/destinations/sql_jobs.py +++ b/dlt/destinations/sql_jobs.py @@ -382,7 +382,7 @@ def gen_merge_sql( unique_column: str = None root_key_column: str = None - if len(table_chain) == 1 and cls.supports_simple_merge_delete(): + if len(table_chain) == 1 and cls.requires_temp_table_for_delete(): key_table_clauses = cls.gen_key_table_clauses( root_table_name, staging_root_table_name, key_clauses, for_delete=True ) @@ -592,7 +592,7 @@ def gen_update_table_prefix(cls, table_name: str) -> str: return f"UPDATE {table_name} SET" @classmethod - def supports_simple_merge_delete(cls) -> bool: + def requires_temp_table_for_delete(cls) -> bool: """this could also be a capabitiy, but 
probably it is better stored here this identifies destinations that can have a simplified method for merging single table table chains diff --git a/tests/load/pipeline/test_pipelines.py b/tests/load/pipeline/test_pipelines.py index 9e41e96f4f..05dae6db61 100644 --- a/tests/load/pipeline/test_pipelines.py +++ b/tests/load/pipeline/test_pipelines.py @@ -910,7 +910,7 @@ def table_1(): yield {"id": 1} @dlt.resource( - columns=[{"name": "id", "data_type": "bigint", "nullable": True}], + columns=[{"name": "id", "data_type": "bigint", "nullable": True, "unique": True}], write_disposition="merge", ) def table_2(): From 6badbcc0aead05fcd59c258071781991ff438d45 Mon Sep 17 00:00:00 2001 From: Dave Date: Tue, 23 Apr 2024 01:19:54 +0200 Subject: [PATCH 109/127] some further fixes --- dlt/destinations/impl/bigquery/bigquery.py | 1 + dlt/helpers/dbt/profiles.yml | 6 ++++-- .../pipeline/test_write_disposition_changes.py | 2 +- tests/load/test_job_client.py | 17 ++++++++++------- 4 files changed, 16 insertions(+), 10 deletions(-) diff --git a/dlt/destinations/impl/bigquery/bigquery.py b/dlt/destinations/impl/bigquery/bigquery.py index 6c9456d723..0ac042a056 100644 --- a/dlt/destinations/impl/bigquery/bigquery.py +++ b/dlt/destinations/impl/bigquery/bigquery.py @@ -83,6 +83,7 @@ class BigQueryTypeMapper(TypeMapper): "NUMERIC": "decimal", "BIGNUMERIC": "decimal", "JSON": "complex", + "TIME": "time", } def to_db_decimal_type(self, precision: Optional[int], scale: Optional[int]) -> str: diff --git a/dlt/helpers/dbt/profiles.yml b/dlt/helpers/dbt/profiles.yml index d56e90f949..d82eb0f2fa 100644 --- a/dlt/helpers/dbt/profiles.yml +++ b/dlt/helpers/dbt/profiles.yml @@ -178,7 +178,9 @@ clickhouse: outputs: analytics: type: clickhouse - schema: "{{ var('destination_dataset_name', var('source_dataset_name')) }}" + schema: "{{ env_var('DLT__CREDENTIALS__DATABASE') }}" host: "{{ env_var('DLT__CREDENTIALS__HOST') }}" user: "{{ env_var('DLT__CREDENTIALS__USERNAME') }}" - password: "{{ env_var('DLT__CREDENTIALS__PASSWORD') }}" \ No newline at end of file + password: "{{ env_var('DLT__CREDENTIALS__PASSWORD') }}" + port: "{{ env_var('DLT__CREDENTIALS__PORT', 8443) | as_number }}" + secure: True \ No newline at end of file diff --git a/tests/load/pipeline/test_write_disposition_changes.py b/tests/load/pipeline/test_write_disposition_changes.py index 50986727ed..a5fe4d87b4 100644 --- a/tests/load/pipeline/test_write_disposition_changes.py +++ b/tests/load/pipeline/test_write_disposition_changes.py @@ -132,7 +132,7 @@ def source(): if destination_allows_adding_root_key and not with_root_key: return - # without a root key this will fail, it is expected + # without a root key this will fail, it is expected as adding non-nullable columns should not work if not with_root_key and destination_config.supports_merge: with pytest.raises(PipelineStepFailed): pipeline.run( diff --git a/tests/load/test_job_client.py b/tests/load/test_job_client.py index c120f95e22..bd9f246aff 100644 --- a/tests/load/test_job_client.py +++ b/tests/load/test_job_client.py @@ -790,18 +790,21 @@ def _load_something(_client: SqlJobClientBase, expected_rows: int) -> None: # 3 rows because we load to the same table _load_something(client, 3) - # adding new non null column will generate sync error + # adding new non null column will generate sync error, except for clickhouse, there it will work event_3_schema.tables["event_user"]["columns"]["mandatory_column"] = new_column( "mandatory_column", "text", nullable=False ) client.schema._bump_version() - with 
pytest.raises(DatabaseException) as py_ex: + if destination_config.destination == "clickhouse": client.update_stored_schema() - assert ( - "mandatory_column" in str(py_ex.value).lower() - or "NOT NULL" in str(py_ex.value) - or "Adding columns with constraints not yet supported" in str(py_ex.value) - ) + else: + with pytest.raises(DatabaseException) as py_ex: + client.update_stored_schema() + assert ( + "mandatory_column" in str(py_ex.value).lower() + or "NOT NULL" in str(py_ex.value) + or "Adding columns with constraints not yet supported" in str(py_ex.value) + ) def prepare_schema(client: SqlJobClientBase, case: str) -> Tuple[List[Dict[str, Any]], str]: From 7950642273dfcd758f61c148873e0ab413570f20 Mon Sep 17 00:00:00 2001 From: Dave Date: Tue, 23 Apr 2024 01:32:39 +0200 Subject: [PATCH 110/127] fix athena tests --- tests/load/test_sql_client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/load/test_sql_client.py b/tests/load/test_sql_client.py index cba26c36dd..26d7884179 100644 --- a/tests/load/test_sql_client.py +++ b/tests/load/test_sql_client.py @@ -641,7 +641,7 @@ def prepare_temp_table(client: SqlJobClientBase) -> str: ) coltype = "bigint" qualified_table_name = table_name - if client.config.destination_type == "clickhouse": + elif client.config.destination_type == "clickhouse": ddl_suffix = "ENGINE = MergeTree() ORDER BY col" qualified_table_name = client.sql_client.make_qualified_table_name(table_name) else: From c98b8b2899fb2dac8cd0a53006f20d11330e7677 Mon Sep 17 00:00:00 2001 From: Dave Date: Tue, 23 Apr 2024 01:38:27 +0200 Subject: [PATCH 111/127] disable dbt for now --- tests/load/utils.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/load/utils.py b/tests/load/utils.py index 6156b33242..278156480e 100644 --- a/tests/load/utils.py +++ b/tests/load/utils.py @@ -208,6 +208,7 @@ def destinations_configs( DestinationTestConfiguration( destination="clickhouse", file_format="jsonl", + supports_dbt=False ) ] destination_configs += [ From 89f30d0a2353c5be18394758ca85c95529ca248c Mon Sep 17 00:00:00 2001 From: Dave Date: Tue, 23 Apr 2024 10:57:22 +0200 Subject: [PATCH 112/127] smaller changes --- dlt/destinations/impl/clickhouse/__init__.py | 1 - dlt/destinations/impl/clickhouse/clickhouse.py | 2 +- dlt/destinations/impl/clickhouse/configuration.py | 4 +--- dlt/destinations/sql_jobs.py | 4 ++-- tests/load/utils.py | 4 +--- 5 files changed, 5 insertions(+), 10 deletions(-) diff --git a/dlt/destinations/impl/clickhouse/__init__.py b/dlt/destinations/impl/clickhouse/__init__.py index 42c14b4216..133a1760e8 100644 --- a/dlt/destinations/impl/clickhouse/__init__.py +++ b/dlt/destinations/impl/clickhouse/__init__.py @@ -12,7 +12,6 @@ def capabilities() -> DestinationCapabilitiesContext: caps = DestinationCapabilitiesContext() - # ClickHouse only supports loading from staged files on s3 for now. 
caps.preferred_loader_file_format = "jsonl" caps.supported_loader_file_formats = ["parquet", "jsonl"] caps.preferred_staging_file_format = "jsonl" diff --git a/dlt/destinations/impl/clickhouse/clickhouse.py b/dlt/destinations/impl/clickhouse/clickhouse.py index fbe660afc8..3f95bae5ea 100644 --- a/dlt/destinations/impl/clickhouse/clickhouse.py +++ b/dlt/destinations/impl/clickhouse/clickhouse.py @@ -277,7 +277,7 @@ def gen_update_table_prefix(cls, table_name: str) -> str: @classmethod def requires_temp_table_for_delete(cls) -> bool: - return False + return True class ClickHouseClient(SqlJobClientWithStaging, SupportsStagingDestination): diff --git a/dlt/destinations/impl/clickhouse/configuration.py b/dlt/destinations/impl/clickhouse/configuration.py index 872ffbe66e..49124e9cbb 100644 --- a/dlt/destinations/impl/clickhouse/configuration.py +++ b/dlt/destinations/impl/clickhouse/configuration.py @@ -16,7 +16,7 @@ @configspec(init=False) class ClickHouseCredentials(ConnectionStringCredentials): drivername: str = "clickhouse" - host: str # type: ignore + host: str = None """Host with running ClickHouse server.""" port: int = 9440 """Native port ClickHouse server is bound to. Defaults to 9440.""" @@ -42,8 +42,6 @@ class ClickHouseCredentials(ConnectionStringCredentials): __config_gen_annotations__: ClassVar[List[str]] = [ "host", "port", - "http_port", - "secure", "database", "username", "password", diff --git a/dlt/destinations/sql_jobs.py b/dlt/destinations/sql_jobs.py index e1be230767..6dff31a799 100644 --- a/dlt/destinations/sql_jobs.py +++ b/dlt/destinations/sql_jobs.py @@ -382,7 +382,7 @@ def gen_merge_sql( unique_column: str = None root_key_column: str = None - if len(table_chain) == 1 and cls.requires_temp_table_for_delete(): + if len(table_chain) == 1 and not cls.requires_temp_table_for_delete(): key_table_clauses = cls.gen_key_table_clauses( root_table_name, staging_root_table_name, key_clauses, for_delete=True ) @@ -597,4 +597,4 @@ def requires_temp_table_for_delete(cls) -> bool: this identifies destinations that can have a simplified method for merging single table table chains """ - return True + return False diff --git a/tests/load/utils.py b/tests/load/utils.py index 278156480e..e7817bfbd1 100644 --- a/tests/load/utils.py +++ b/tests/load/utils.py @@ -206,9 +206,7 @@ def destinations_configs( ] destination_configs += [ DestinationTestConfiguration( - destination="clickhouse", - file_format="jsonl", - supports_dbt=False + destination="clickhouse", file_format="jsonl", supports_dbt=False ) ] destination_configs += [ From 02dfb08717653c704f435fe492379fe47c563da4 Mon Sep 17 00:00:00 2001 From: Dave Date: Tue, 23 Apr 2024 13:33:33 +0200 Subject: [PATCH 113/127] add clickhouse adapter tests, update small part of docs and correct imports --- dlt/common/configuration/providers/airflow.py | 1 - dlt/destinations/adapters.py | 9 ++- dlt/destinations/impl/clickhouse/__init__.py | 1 + .../impl/clickhouse/configuration.py | 1 + .../impl/clickhouse/sql_client.py | 2 +- .../dlt-ecosystem/destinations/clickhouse.md | 9 ++- .../clickhouse/test_clickhouse_adapter.py | 61 +++++++++++++++++++ 7 files changed, 79 insertions(+), 5 deletions(-) create mode 100644 tests/load/clickhouse/test_clickhouse_adapter.py diff --git a/dlt/common/configuration/providers/airflow.py b/dlt/common/configuration/providers/airflow.py index 99edf258d2..edd02c3487 100644 --- a/dlt/common/configuration/providers/airflow.py +++ b/dlt/common/configuration/providers/airflow.py @@ -14,7 +14,6 @@ def name(self) -> str: def 
_look_vault(self, full_key: str, hint: type) -> str: """Get Airflow Variable with given `full_key`, return None if not found""" - from airflow.models import Variable with contextlib.redirect_stdout(io.StringIO()), contextlib.redirect_stderr(io.StringIO()): diff --git a/dlt/destinations/adapters.py b/dlt/destinations/adapters.py index a143934116..554bd88924 100644 --- a/dlt/destinations/adapters.py +++ b/dlt/destinations/adapters.py @@ -4,5 +4,12 @@ from dlt.destinations.impl.qdrant import qdrant_adapter from dlt.destinations.impl.bigquery import bigquery_adapter from dlt.destinations.impl.synapse import synapse_adapter +from dlt.destinations.impl.clickhouse import clickhouse_adapter -__all__ = ["weaviate_adapter", "qdrant_adapter", "bigquery_adapter", "synapse_adapter"] +__all__ = [ + "weaviate_adapter", + "qdrant_adapter", + "bigquery_adapter", + "synapse_adapter", + "clickhouse_adapter", +] diff --git a/dlt/destinations/impl/clickhouse/__init__.py b/dlt/destinations/impl/clickhouse/__init__.py index 133a1760e8..91cff79d53 100644 --- a/dlt/destinations/impl/clickhouse/__init__.py +++ b/dlt/destinations/impl/clickhouse/__init__.py @@ -8,6 +8,7 @@ format_clickhouse_datetime_literal, ) from dlt.common.destination import DestinationCapabilitiesContext +from dlt.destinations.impl.clickhouse.clickhouse_adapter import clickhouse_adapter def capabilities() -> DestinationCapabilitiesContext: diff --git a/dlt/destinations/impl/clickhouse/configuration.py b/dlt/destinations/impl/clickhouse/configuration.py index 49124e9cbb..1a178fe2ec 100644 --- a/dlt/destinations/impl/clickhouse/configuration.py +++ b/dlt/destinations/impl/clickhouse/configuration.py @@ -42,6 +42,7 @@ class ClickHouseCredentials(ConnectionStringCredentials): __config_gen_annotations__: ClassVar[List[str]] = [ "host", "port", + "http_port", "database", "username", "password", diff --git a/dlt/destinations/impl/clickhouse/sql_client.py b/dlt/destinations/impl/clickhouse/sql_client.py index 8d87df9c07..6671f5b348 100644 --- a/dlt/destinations/impl/clickhouse/sql_client.py +++ b/dlt/destinations/impl/clickhouse/sql_client.py @@ -98,7 +98,7 @@ def create_dataset(self) -> None: self.credentials.dataset_sentinel_table_name ) self.execute_sql( - f"""CREATE TABLE {sentinel_table_name} (_dlt_id String NOT NULL PRIMARY KEY) ENGINE=MergeTree COMMENT 'internal dlt sentinel table'""" + f"""CREATE TABLE {sentinel_table_name} (_dlt_id String NOT NULL PRIMARY KEY) ENGINE=ReplicatedMergeTree COMMENT 'internal dlt sentinel table'""" ) def drop_dataset(self) -> None: diff --git a/docs/website/docs/dlt-ecosystem/destinations/clickhouse.md b/docs/website/docs/dlt-ecosystem/destinations/clickhouse.md index 2a3d6f5123..811166d400 100644 --- a/docs/website/docs/dlt-ecosystem/destinations/clickhouse.md +++ b/docs/website/docs/dlt-ecosystem/destinations/clickhouse.md @@ -106,12 +106,17 @@ ClickHouse supports the following [column hints](https://dlthub.com/docs/general ## Table Engine -By default, tables are created using the `ReplicatedMergeTree` table engine in ClickHouse. You can specify an alternate table engine using the `table_engine_type` hint on the resource: +By default, tables are created using the `ReplicatedMergeTree` table engine in ClickHouse. You can specify an alternate table engine using the `table_engine_type` with the clickhouse adapter: ```py -@dlt.resource(table_engine_type="merge_tree") +from dlt.destinations.adapters import clickhouse_adapter + +@dlt.resource() def my_resource(): ... 
+ +clickhouse_adapter(my_resource, table_engine_type="merge_tree") + ``` Supported values are: diff --git a/tests/load/clickhouse/test_clickhouse_adapter.py b/tests/load/clickhouse/test_clickhouse_adapter.py new file mode 100644 index 0000000000..36d3ac07f7 --- /dev/null +++ b/tests/load/clickhouse/test_clickhouse_adapter.py @@ -0,0 +1,61 @@ +import dlt +from dlt.destinations.adapters import clickhouse_adapter +from tests.pipeline.utils import assert_load_info + + +def test_clickhouse_adapter() -> None: + @dlt.resource + def merge_tree_resource(): + yield {"field1": 1, "field2": 2} + + @dlt.resource + def replicated_merge_tree_resource(): + yield {"field1": 1, "field2": 2} + + @dlt.resource + def not_annotated_resource(): + yield {"field1": 1, "field2": 2} + + clickhouse_adapter(merge_tree_resource, table_engine_type="merge_tree") + clickhouse_adapter(replicated_merge_tree_resource, table_engine_type="replicated_merge_tree") + + pipe = dlt.pipeline(pipeline_name="adapter_test", destination="clickhouse", full_refresh=True) + pack = pipe.run([merge_tree_resource, replicated_merge_tree_resource, not_annotated_resource]) + + assert_load_info(pack) + + with pipe.sql_client() as client: + # get map of table names to full table names + tables = {} + for table in client._list_tables(): + if "resource" in table: + tables[table.split("___")[1]] = table + assert (len(tables.keys())) == 3 + + # check content + for full_table_name in tables.values(): + with client.execute_query(f"SELECT * FROM {full_table_name};") as cursor: + res = cursor.fetchall() + assert tuple(res[0])[:2] == (1, 2) + + # check table format + # fails now, because we do not have a cluster (I think), it will fall back to SharedMergeTree + for full_table_name in tables.values(): + with client.execute_query( + "SELECT database, name, engine, engine_full FROM system.tables WHERE name =" + f" '{full_table_name}';" + ) as cursor: + res = cursor.fetchall() + # this should test that two tables should be replicatedmergetree tables + assert tuple(res[0])[2] == "SharedMergeTree" + + # we can check the gen table sql though + with pipe.destination_client() as dest_client: + for table in tables.keys(): + sql = dest_client._get_table_update_sql( # type: ignore[attr-defined] + table, pipe.default_schema.tables[table]["columns"].values(), generate_alter=False + ) + if table == "merge_tree_resource": + assert "ENGINE = MergeTree" in sql[0] + else: + assert "ENGINE = ReplicatedMergeTree" in sql[0] From b06bd9b7e8c284b811f6b5e6c663012377dd59e2 Mon Sep 17 00:00:00 2001 From: Dave Date: Tue, 23 Apr 2024 13:38:23 +0200 Subject: [PATCH 114/127] update scd2 sql based on jorrits suggestions --- dlt/destinations/sql_jobs.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/dlt/destinations/sql_jobs.py b/dlt/destinations/sql_jobs.py index 6dff31a799..964aa248e0 100644 --- a/dlt/destinations/sql_jobs.py +++ b/dlt/destinations/sql_jobs.py @@ -579,12 +579,11 @@ def gen_scd2_sql( staging_table_name = sql_client.make_qualified_table_name(table["name"]) sql.append(f""" INSERT INTO {table_name} - SELECT s.* - FROM {staging_table_name} AS s - LEFT JOIN {table_name} as t - on s.{unique_column} = t.{unique_column} - WHERE t.{unique_column} IS NULL or t.{unique_column} = ''; + SELECT * + FROM {staging_table_name} + WHERE {unique_column} NOT IN (SELECT {unique_column} FROM {table_name}); """) + return sql @classmethod From 0ca6c36768a428a0903090847ab64ae242927dc9 Mon Sep 17 00:00:00 2001 From: Dave Date: Tue, 23 Apr 2024 17:38:57 +0200 Subject: 
[PATCH 115/127] change merge change test to make it pass --- tests/load/pipeline/test_write_disposition_changes.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/tests/load/pipeline/test_write_disposition_changes.py b/tests/load/pipeline/test_write_disposition_changes.py index a5fe4d87b4..cfe460c670 100644 --- a/tests/load/pipeline/test_write_disposition_changes.py +++ b/tests/load/pipeline/test_write_disposition_changes.py @@ -127,9 +127,15 @@ def source(): # schemaless destinations allow adding of root key without the pipeline failing # for now this is only the case for dremio # doing this will result in somewhat useless behavior - destination_allows_adding_root_key = destination_config.destination == "dremio" + destination_allows_adding_root_key = destination_config.destination in ["dremio", "clickhouse"] if destination_allows_adding_root_key and not with_root_key: + pipeline.run( + s, + table_name="items", + write_disposition="merge", + loader_file_format=destination_config.file_format, + ) return # without a root key this will fail, it is expected as adding non-nullable columns should not work From 290faa1bb516165d2ead29a5c911580914a55649 Mon Sep 17 00:00:00 2001 From: Dave Date: Tue, 23 Apr 2024 17:59:23 +0200 Subject: [PATCH 116/127] use text for json in clickhouse --- dlt/destinations/impl/clickhouse/clickhouse.py | 5 ++--- tests/cases.py | 4 ++++ tests/load/pipeline/test_merge_disposition.py | 10 +++------- tests/load/pipeline/test_pipelines.py | 3 ++- tests/load/pipeline/test_stage_loading.py | 5 +---- tests/load/test_job_client.py | 4 ---- 6 files changed, 12 insertions(+), 19 deletions(-) diff --git a/dlt/destinations/impl/clickhouse/clickhouse.py b/dlt/destinations/impl/clickhouse/clickhouse.py index 3f95bae5ea..17a9951335 100644 --- a/dlt/destinations/impl/clickhouse/clickhouse.py +++ b/dlt/destinations/impl/clickhouse/clickhouse.py @@ -22,7 +22,6 @@ FollowupJob, LoadJob, NewLoadJob, - StorageSchemaInfo, ) from dlt.common.schema import Schema, TColumnSchema from dlt.common.schema.typing import ( @@ -72,7 +71,7 @@ class ClickHouseTypeMapper(TypeMapper): sct_to_unbound_dbt = { - "complex": "JSON", + "complex": "text", "text": "String", "double": "Float64", "bool": "Boolean", @@ -182,7 +181,7 @@ def __init__( fmt=clickhouse_format, settings={ "allow_experimental_lightweight_delete": 1, - "allow_experimental_object_type": 1, + # "allow_experimental_object_type": 1, "enable_http_compression": 1, }, compression=compression, diff --git a/tests/cases.py b/tests/cases.py index 15e3fef091..0ba2c05f40 100644 --- a/tests/cases.py +++ b/tests/cases.py @@ -198,6 +198,7 @@ def assert_all_data_types_row( schema: TTableSchemaColumns = None, expect_filtered_null_columns=False, allow_string_binary: bool = False, + expect_empty_string_for_null_complex: bool = False, ) -> None: # content must equal # print(db_row) @@ -237,6 +238,9 @@ def assert_all_data_types_row( ensure_pendulum_time(expected_rows["col11_precision"]), 3 # type: ignore[arg-type] ) + if "col9_null" in expected_rows and expect_empty_string_for_null_complex: + expected_rows["col9_null"] = "" + # redshift and bigquery return strings from structured fields for binary_col in ["col7", "col7_precision"]: if binary_col in db_mapping: diff --git a/tests/load/pipeline/test_merge_disposition.py b/tests/load/pipeline/test_merge_disposition.py index d2978e105a..aaa8a73571 100644 --- a/tests/load/pipeline/test_merge_disposition.py +++ b/tests/load/pipeline/test_merge_disposition.py @@ -139,7 +139,7 @@ def 
test_merge_on_ad_hoc_primary_key(destination_config: DestinationTestConfigur @dlt.source(root_key=True) -def github(remove_lists: bool = False): +def github(): @dlt.resource( table_name="issues", write_disposition="merge", @@ -151,10 +151,6 @@ def load_issues(): "tests/normalize/cases/github.issues.load_page_5_duck.json", "r", encoding="utf-8" ) as f: for item in json.load(f): - # for clickhouse we cannot have lists in json fields - if remove_lists: - item.pop("assignees") - item.pop("labels") yield item return load_issues @@ -217,7 +213,7 @@ def test_merge_source_compound_keys_and_changes( ) def test_merge_no_child_tables(destination_config: DestinationTestConfiguration) -> None: p = destination_config.setup_pipeline("github_3", full_refresh=True) - github_data = github(True) + github_data = github() assert github_data.max_table_nesting is None assert github_data.root_key is True # set max nesting to 0 so no child tables are generated @@ -236,7 +232,7 @@ def test_merge_no_child_tables(destination_config: DestinationTestConfiguration) assert github_1_counts["issues"] == 15 # load all - github_data = github(True) + github_data = github() github_data.max_table_nesting = 0 info = p.run(github_data, loader_file_format=destination_config.file_format) assert_load_info(info) diff --git a/tests/load/pipeline/test_pipelines.py b/tests/load/pipeline/test_pipelines.py index 05dae6db61..818b6fa6e1 100644 --- a/tests/load/pipeline/test_pipelines.py +++ b/tests/load/pipeline/test_pipelines.py @@ -795,7 +795,7 @@ def other_data(): column_schemas = deepcopy(TABLE_UPDATE_COLUMNS_SCHEMA) # parquet on bigquery and clickhouse does not support JSON but we still want to run the test - if destination_config.destination in ["bigquery", "clickhouse"]: + if destination_config.destination in ["bigquery"]: column_schemas["col9_null"]["data_type"] = column_schemas["col9"]["data_type"] = "text" # duckdb 0.9.1 does not support TIME other than 6 @@ -873,6 +873,7 @@ def some_source(): in ["snowflake", "bigquery", "redshift"], allow_string_binary=destination_config.destination == "clickhouse", timestamp_precision=3 if destination_config.destination in ("athena", "dremio") else 6, + expect_empty_string_for_null_complex=destination_config.destination == "clickhouse", ) diff --git a/tests/load/pipeline/test_stage_loading.py b/tests/load/pipeline/test_stage_loading.py index 60a7be259b..f6c47f5ecd 100644 --- a/tests/load/pipeline/test_stage_loading.py +++ b/tests/load/pipeline/test_stage_loading.py @@ -204,10 +204,6 @@ def test_all_data_types(destination_config: DestinationTestConfiguration) -> Non exclude_types=exclude_types, exclude_columns=exclude_columns ) - # clickhouse json is experimental, will not work for parquet and makes strange changes for jsonl - if destination_config.destination in ["clickhouse"]: - column_schemas["col9_null"]["data_type"] = column_schemas["col9"]["data_type"] = "text" - # bigquery and clickhouse cannot load into JSON fields from parquet if destination_config.file_format == "parquet": if destination_config.destination in ["bigquery"]: @@ -260,4 +256,5 @@ def my_source(): allow_string_binary=allow_string_binary, timestamp_precision=sql_client.capabilities.timestamp_precision, schema=column_schemas, + expect_empty_string_for_null_complex=destination_config.destination == "clickhouse", ) diff --git a/tests/load/test_job_client.py b/tests/load/test_job_client.py index bd9f246aff..4c442d1ff9 100644 --- a/tests/load/test_job_client.py +++ b/tests/load/test_job_client.py @@ -521,10 +521,6 @@ def 
test_load_with_all_types( ), ) - # switch complex to string for clickhouse - if client.config.destination_type in ["clickhouse"]: - column_schemas["col9_null"]["data_type"] = column_schemas["col9"]["data_type"] = "text" - # we should have identical content with all disposition types client.schema.update_table( new_table( From 9c8a8b23f4e0df377e6ce9286f9cf4c66e221bfe Mon Sep 17 00:00:00 2001 From: Dave Date: Tue, 23 Apr 2024 18:06:01 +0200 Subject: [PATCH 117/127] remove some unrelated unneeded stuff --- dlt/destinations/impl/clickhouse/sql_client.py | 9 +-------- dlt/destinations/impl/databricks/sql_client.py | 6 ------ dlt/destinations/impl/duckdb/sql_client.py | 10 +--------- dlt/destinations/impl/snowflake/sql_client.py | 12 +----------- 4 files changed, 3 insertions(+), 34 deletions(-) diff --git a/dlt/destinations/impl/clickhouse/sql_client.py b/dlt/destinations/impl/clickhouse/sql_client.py index 6671f5b348..8fb89c90cd 100644 --- a/dlt/destinations/impl/clickhouse/sql_client.py +++ b/dlt/destinations/impl/clickhouse/sql_client.py @@ -183,10 +183,7 @@ def _make_database_exception(cls, ex: Exception) -> Exception: clickhouse_driver.dbapi.errors.InternalError, ), ): - if term := cls._maybe_make_terminal_exception_from_data_error(): - return term - else: - return DatabaseTransientException(ex) + return DatabaseTransientException(ex) elif isinstance( ex, ( @@ -201,10 +198,6 @@ def _make_database_exception(cls, ex: Exception) -> Exception: else: return ex - @staticmethod - def _maybe_make_terminal_exception_from_data_error() -> Optional[Exception]: - return None - @staticmethod def is_dbapi_exception(ex: Exception) -> bool: return isinstance(ex, clickhouse_driver.dbapi.Error) diff --git a/dlt/destinations/impl/databricks/sql_client.py b/dlt/destinations/impl/databricks/sql_client.py index 7e2487593d..7f0ee2b5e6 100644 --- a/dlt/destinations/impl/databricks/sql_client.py +++ b/dlt/destinations/impl/databricks/sql_client.py @@ -142,12 +142,6 @@ def _make_database_exception(ex: Exception) -> Exception: else: return DatabaseTransientException(ex) - @staticmethod - def _maybe_make_terminal_exception_from_data_error( - databricks_ex: databricks_lib.DatabaseError, - ) -> Optional[Exception]: - return None - @staticmethod def is_dbapi_exception(ex: Exception) -> bool: return isinstance(ex, databricks_lib.DatabaseError) diff --git a/dlt/destinations/impl/duckdb/sql_client.py b/dlt/destinations/impl/duckdb/sql_client.py index 2863d4943e..bb85b5825b 100644 --- a/dlt/destinations/impl/duckdb/sql_client.py +++ b/dlt/destinations/impl/duckdb/sql_client.py @@ -168,11 +168,7 @@ def _make_database_exception(cls, ex: Exception) -> Exception: duckdb.ParserException, ), ): - term = cls._maybe_make_terminal_exception_from_data_error(ex) - if term: - return term - else: - return DatabaseTransientException(ex) + return DatabaseTransientException(ex) elif isinstance(ex, (duckdb.DataError, duckdb.ProgrammingError, duckdb.IntegrityError)): return DatabaseTerminalException(ex) elif cls.is_dbapi_exception(ex): @@ -180,10 +176,6 @@ def _make_database_exception(cls, ex: Exception) -> Exception: else: return ex - @staticmethod - def _maybe_make_terminal_exception_from_data_error(pg_ex: duckdb.Error) -> Optional[Exception]: - return None - @staticmethod def is_dbapi_exception(ex: Exception) -> bool: return isinstance(ex, duckdb.Error) diff --git a/dlt/destinations/impl/snowflake/sql_client.py b/dlt/destinations/impl/snowflake/sql_client.py index ba932277df..4a602ce0e8 100644 --- 
a/dlt/destinations/impl/snowflake/sql_client.py +++ b/dlt/destinations/impl/snowflake/sql_client.py @@ -148,11 +148,7 @@ def _make_database_exception(cls, ex: Exception) -> Exception: elif isinstance(ex, snowflake_lib.errors.IntegrityError): raise DatabaseTerminalException(ex) elif isinstance(ex, snowflake_lib.errors.DatabaseError): - term = cls._maybe_make_terminal_exception_from_data_error(ex) - if term: - return term - else: - return DatabaseTransientException(ex) + return DatabaseTransientException(ex) elif isinstance(ex, TypeError): # snowflake raises TypeError on malformed query parameters return DatabaseTransientException(snowflake_lib.errors.ProgrammingError(str(ex))) @@ -161,12 +157,6 @@ def _make_database_exception(cls, ex: Exception) -> Exception: else: return ex - @staticmethod - def _maybe_make_terminal_exception_from_data_error( - snowflake_ex: snowflake_lib.DatabaseError, - ) -> Optional[Exception]: - return None - @staticmethod def is_dbapi_exception(ex: Exception) -> bool: return isinstance(ex, snowflake_lib.DatabaseError) From 65c9cec0926c0c9c4186bafa5247217979e72920 Mon Sep 17 00:00:00 2001 From: Dave Date: Tue, 23 Apr 2024 18:21:28 +0200 Subject: [PATCH 118/127] update docs a bit --- .../dlt-ecosystem/destinations/clickhouse.md | 23 ++++++++++++++----- 1 file changed, 17 insertions(+), 6 deletions(-) diff --git a/docs/website/docs/dlt-ecosystem/destinations/clickhouse.md b/docs/website/docs/dlt-ecosystem/destinations/clickhouse.md index 811166d400..57f23db83b 100644 --- a/docs/website/docs/dlt-ecosystem/destinations/clickhouse.md +++ b/docs/website/docs/dlt-ecosystem/destinations/clickhouse.md @@ -14,11 +14,6 @@ keywords: [ clickhouse, destination, data warehouse ] pip install dlt[clickhouse] ``` -## Dev Todos for docs -* Clickhouse uses string for time -* bytes are converted to base64 strings when using jsonl and regular strings when using parquet -* JSON / complex fields are experimental currently, they are not supported when loading from parquet and nested structures will be changed when loading from jsonl - ## Setup Guide ### 1. Initialize the dlt project @@ -93,11 +88,27 @@ Data is loaded into ClickHouse using the most efficient method depending on the - For files in remote storage like S3, Google Cloud Storage, or Azure Blob Storage, ClickHouse table functions like `s3`, `gcs` and `azureBlobStorage` are used to read the files and insert the data into tables. +## Datasets + +`Clickhouse` does not support multiple datasets in one database, dlt relies on datasets to exist for multiple reasons. +To make `clickhouse` work with `dlt`, tables generated by `dlt` in your `clickhouse` database will have their name prefixed with the dataset name separated by +the configurable `dataset_table_separator`. Additionally a special sentinel table that does not contain any data will also be created, so dlt knows which virtual datasets already exist in a clickhouse +destination. + ## Supported file formats - [jsonl](../file-formats/jsonl.md) is the preferred format for both direct loading and staging. - [parquet](../file-formats/parquet.md) is supported for both direct loading and staging. +The `clickhouse` destination has a few specific deviations from the default sql destinations: + +1. `Clickhouse` has an experimental `object` datatype, but we have found it to be a bit unpredictable, so the dlt clickhouse destination will load the complex dataype to a `text` column. If you need +this feature, please get in touch in our slack community and we will consider adding it. +2. 
`Clickhouse` does not support the `time` datatype. Time will be loaded to a `text` column. +3. `Clickhouse` does not support the `binary` datatype. Binary will be loaded to a `text` column. When loading from `jsonl`, this will be a base64 string, when loading from parquet this will be +the `binary` object converted to `text`. +4. `Clickhouse` accepts adding columns to a populated table that are not null. + ## Supported column hints ClickHouse supports the following [column hints](https://dlthub.com/docs/general-usage/schema#tables-and-columns): @@ -149,7 +160,7 @@ pipeline = dlt.pipeline( ### dbt support -Integration with [dbt](../transformations/dbt/dbt.md) is supported. +Integration with [dbt](../transformations/dbt/dbt.md) is generally supported via dbt-clickhouse, but not tested by us at this time. ### Syncing of `dlt` state From 04f357a8ae452f14222a0da257b02b2b7e9d8113 Mon Sep 17 00:00:00 2001 From: Dave Date: Wed, 24 Apr 2024 00:47:32 +0200 Subject: [PATCH 119/127] fix json to string tests and implementation --- dlt/destinations/impl/clickhouse/clickhouse.py | 4 ++-- tests/cases.py | 4 ---- tests/load/clickhouse/test_clickhouse_table_builder.py | 8 ++++---- tests/load/pipeline/test_pipelines.py | 1 - tests/load/pipeline/test_stage_loading.py | 1 - tests/load/test_job_client.py | 8 +++++--- 6 files changed, 11 insertions(+), 15 deletions(-) diff --git a/dlt/destinations/impl/clickhouse/clickhouse.py b/dlt/destinations/impl/clickhouse/clickhouse.py index 17a9951335..54b36a7a3e 100644 --- a/dlt/destinations/impl/clickhouse/clickhouse.py +++ b/dlt/destinations/impl/clickhouse/clickhouse.py @@ -71,7 +71,7 @@ class ClickHouseTypeMapper(TypeMapper): sct_to_unbound_dbt = { - "complex": "text", + "complex": "String", "text": "String", "double": "Float64", "bool": "Boolean", @@ -313,7 +313,7 @@ def _get_column_def_sql(self, c: TColumnSchema, table_format: TTableFormat = Non # JSON type isn't nullable in ClickHouse. 
type_with_nullability_modifier = ( f"Nullable({self.type_mapper.to_db_type(c)})" - if c.get("nullable", True) and c.get("data_type") != "complex" + if c.get("nullable", True) else self.type_mapper.to_db_type(c) ) diff --git a/tests/cases.py b/tests/cases.py index 0ba2c05f40..15e3fef091 100644 --- a/tests/cases.py +++ b/tests/cases.py @@ -198,7 +198,6 @@ def assert_all_data_types_row( schema: TTableSchemaColumns = None, expect_filtered_null_columns=False, allow_string_binary: bool = False, - expect_empty_string_for_null_complex: bool = False, ) -> None: # content must equal # print(db_row) @@ -238,9 +237,6 @@ def assert_all_data_types_row( ensure_pendulum_time(expected_rows["col11_precision"]), 3 # type: ignore[arg-type] ) - if "col9_null" in expected_rows and expect_empty_string_for_null_complex: - expected_rows["col9_null"] = "" - # redshift and bigquery return strings from structured fields for binary_col in ["col7", "col7_precision"]: if binary_col in db_mapping: diff --git a/tests/load/clickhouse/test_clickhouse_table_builder.py b/tests/load/clickhouse/test_clickhouse_table_builder.py index f38b29a5cb..fd3bf50907 100644 --- a/tests/load/clickhouse/test_clickhouse_table_builder.py +++ b/tests/load/clickhouse/test_clickhouse_table_builder.py @@ -66,7 +66,7 @@ def test_clickhouse_create_table(clickhouse_client: ClickHouseClient) -> None: assert "`col6` Decimal(38,9)" in sql assert "`col7` String" in sql assert "`col8` Decimal(76,0)" in sql - assert "`col9` JSON" in sql + assert "`col9` String" in sql assert "`col10` Date" in sql assert "`col11` String" in sql assert "`col1_null` Nullable(Int64)" in sql @@ -77,7 +77,7 @@ def test_clickhouse_create_table(clickhouse_client: ClickHouseClient) -> None: assert "`col6_null` Nullable(Decimal(38,9))" in sql assert "`col7_null` Nullable(String)" in sql assert "`col8_null` Nullable(Decimal(76,0))" in sql - assert "`col9_null` JSON" in sql # JSON isn't nullable in clickhouse + assert "`col9_null` Nullable(String)" in sql assert "`col10_null` Nullable(Date)" in sql assert "`col11_null` Nullable(String)" in sql assert "`col1_precision` Int64" in sql @@ -108,7 +108,7 @@ def test_clickhouse_alter_table(clickhouse_client: ClickHouseClient) -> None: assert "`col6` Decimal(38,9)" in sql assert "`col7` String" in sql assert "`col8` Decimal(76,0)" in sql - assert "`col9` JSON" in sql + assert "`col9` String" in sql assert "`col10` Date" in sql assert "`col11` String" in sql assert "`col1_null` Nullable(Int64)" in sql @@ -119,7 +119,7 @@ def test_clickhouse_alter_table(clickhouse_client: ClickHouseClient) -> None: assert "`col6_null` Nullable(Decimal(38,9))" in sql assert "`col7_null` Nullable(String)" in sql assert "`col8_null` Nullable(Decimal(76,0))" in sql - assert "`col9_null` JSON" in sql + assert "`col9_null` Nullable(String)" in sql assert "`col10_null` Nullable(Date)" in sql assert "`col11_null` Nullable(String)" in sql assert "`col1_precision` Int64" in sql diff --git a/tests/load/pipeline/test_pipelines.py b/tests/load/pipeline/test_pipelines.py index 818b6fa6e1..bfcb310195 100644 --- a/tests/load/pipeline/test_pipelines.py +++ b/tests/load/pipeline/test_pipelines.py @@ -873,7 +873,6 @@ def some_source(): in ["snowflake", "bigquery", "redshift"], allow_string_binary=destination_config.destination == "clickhouse", timestamp_precision=3 if destination_config.destination in ("athena", "dremio") else 6, - expect_empty_string_for_null_complex=destination_config.destination == "clickhouse", ) diff --git a/tests/load/pipeline/test_stage_loading.py 
b/tests/load/pipeline/test_stage_loading.py index f6c47f5ecd..e7fb42ae21 100644 --- a/tests/load/pipeline/test_stage_loading.py +++ b/tests/load/pipeline/test_stage_loading.py @@ -256,5 +256,4 @@ def my_source(): allow_string_binary=allow_string_binary, timestamp_precision=sql_client.capabilities.timestamp_precision, schema=column_schemas, - expect_empty_string_for_null_complex=destination_config.destination == "clickhouse", ) diff --git a/tests/load/test_job_client.py b/tests/load/test_job_client.py index 4c442d1ff9..08b80af928 100644 --- a/tests/load/test_job_client.py +++ b/tests/load/test_job_client.py @@ -389,8 +389,10 @@ def test_get_storage_table_with_all_types(client: SqlJobClientBase) -> None: "time", ): continue - # mssql and synapse have no native data type for the complex type. - if client.config.destination_type in ("mssql", "synapse") and c["data_type"] in ("complex"): + # mssql, clickhouse and synapse have no native data type for the complex type. + if client.config.destination_type in ("mssql", "synapse", "clickhouse") and c[ + "data_type" + ] in ("complex"): continue if client.config.destination_type == "databricks" and c["data_type"] in ("complex", "time"): continue @@ -550,7 +552,7 @@ def test_load_with_all_types( assert_all_data_types_row( db_row, schema=column_schemas, - allow_base64_binary=True if client.config.destination_type in ["clickhouse"] else False, + allow_base64_binary=client.config.destination_type in ["clickhouse"], ) From d2231ef37f4f9b26be0f5e23eda417a39057cb8c Mon Sep 17 00:00:00 2001 From: Dave Date: Wed, 24 Apr 2024 11:17:56 +0200 Subject: [PATCH 120/127] move gcp access credentials into proper config --- .../impl/clickhouse/clickhouse.py | 19 +++++++++++++------ .../impl/clickhouse/configuration.py | 8 ++++++-- .../test_clickhouse_configuration.py | 12 ------------ 3 files changed, 19 insertions(+), 20 deletions(-) diff --git a/dlt/destinations/impl/clickhouse/clickhouse.py b/dlt/destinations/impl/clickhouse/clickhouse.py index 54b36a7a3e..e2c1f827bc 100644 --- a/dlt/destinations/impl/clickhouse/clickhouse.py +++ b/dlt/destinations/impl/clickhouse/clickhouse.py @@ -15,6 +15,7 @@ GcpCredentials, AwsCredentialsWithoutDefaults, ) +from dlt.destinations.exceptions import DestinationTransientException from dlt.common.destination import DestinationCapabilitiesContext from dlt.common.destination.reference import ( SupportsStagingDestination, @@ -194,22 +195,28 @@ def __init__( return # Auto does not work for jsonl, get info from config for buckets + # NOTE: we should not really be accessing the config this way, but for + # now it is ok... 
if ext == "jsonl": compression = "none" if config.get("data_writer.disable_compression") else "gz" if bucket_scheme in ("s3", "gs", "gcs"): # get auth and bucket url bucket_http_url = convert_storage_to_http_scheme(bucket_url) - access_key_id = None - secret_access_key = None + access_key_id: str = None + secret_access_key: str = None if isinstance(staging_credentials, AwsCredentialsWithoutDefaults): access_key_id = staging_credentials.aws_access_key_id secret_access_key = staging_credentials.aws_secret_access_key elif isinstance(staging_credentials, GcpCredentials): - access_key_id = dlt.config["destination.filesystem.credentials.gcp_access_key_id"] - secret_access_key = dlt.config[ - "destination.filesystem.credentials.gcp_secret_access_key" - ] + access_key_id = client.credentials.gcp_access_key_id + secret_access_key = client.credentials.gcp_secret_access_key + if not access_key_id or not secret_access_key: + raise DestinationTransientException( + "You have tried loading from gcs with clickhouse. Please provide valid" + " 'gcp_access_key_id' and 'gcp_secret_access_key' to connect to gcs as" + " outlined in the dlthub docs." + ) auth = "NOSIGN" if access_key_id and secret_access_key: diff --git a/dlt/destinations/impl/clickhouse/configuration.py b/dlt/destinations/impl/clickhouse/configuration.py index 1a178fe2ec..bbff6e0a9c 100644 --- a/dlt/destinations/impl/clickhouse/configuration.py +++ b/dlt/destinations/impl/clickhouse/configuration.py @@ -1,5 +1,5 @@ import dataclasses -from typing import ClassVar, List, Any, Final, Literal, cast +from typing import ClassVar, List, Any, Final, Literal, cast, Optional from dlt.common.configuration import configspec from dlt.common.configuration.specs import ConnectionStringCredentials @@ -38,6 +38,10 @@ class ClickHouseCredentials(ConnectionStringCredentials): """Separator for dataset table names, defaults to '___', i.e. 'database.dataset___table'.""" dataset_sentinel_table_name: str = "dlt_sentinel_table" """Special table to mark dataset as existing""" + gcp_access_key_id: Optional[str] = None + """When loading from a gcp bucket, you need to provide gcp interoperable keys""" + gcp_secret_access_key: Optional[str] = None + """When loading from a gcp bucket, you need to provide gcp interoperable keys""" __config_gen_annotations__: ClassVar[List[str]] = [ "host", @@ -67,7 +71,7 @@ def to_url(self) -> URL: ("secure", str(1) if self.secure else str(0)), # Toggle experimental settings. These are necessary for certain datatypes and not optional. 
("allow_experimental_lightweight_delete", "1"), - ("allow_experimental_object_type", "1"), + # ("allow_experimental_object_type", "1"), ("enable_http_compression", "1"), ] ) diff --git a/tests/load/clickhouse/test_clickhouse_configuration.py b/tests/load/clickhouse/test_clickhouse_configuration.py index dcbf7e0935..eb02155406 100644 --- a/tests/load/clickhouse/test_clickhouse_configuration.py +++ b/tests/load/clickhouse/test_clickhouse_configuration.py @@ -60,17 +60,6 @@ def test_clickhouse_configuration() -> None: assert SnowflakeClientConfiguration(credentials=c).fingerprint() == digest128("host1") -@pytest.mark.usefixtures("environment") -def test_clickhouse_gcp_hmac_getter_accessor(environment: Any) -> None: - environment["DESTINATION__FILESYSTEM__CREDENTIALS__GCP_ACCESS_KEY_ID"] = "25g08jaDJacj42" - environment["DESTINATION__FILESYSTEM__CREDENTIALS__GCP_SECRET_ACCESS_KEY"] = "ascvntp45uasdf" - - assert dlt.config["destination.filesystem.credentials.gcp_access_key_id"] == "25g08jaDJacj42" - assert ( - dlt.config["destination.filesystem.credentials.gcp_secret_access_key"] == "ascvntp45uasdf" - ) - - def test_clickhouse_connection_settings(client: ClickHouseClient) -> None: """Test experimental settings are set correctly for session.""" conn = client.sql_client.open_connection() @@ -84,5 +73,4 @@ def test_clickhouse_connection_settings(client: ClickHouseClient) -> None: res = cursor.fetchall() assert ("allow_experimental_lightweight_delete", "1") in res - assert ("allow_experimental_object_type", "1") in res assert ("enable_http_compression", "1") in res From 78e4c56f591eedf31891fcdcefd6cee22f1ebfc2 Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Wed, 24 Apr 2024 18:01:08 +0200 Subject: [PATCH 121/127] Add GCS Clickhouse staging docs #1055 Signed-off-by: Marcel Coetzee --- .../dlt-ecosystem/destinations/clickhouse.md | 65 ++++++++++++++++--- 1 file changed, 56 insertions(+), 9 deletions(-) diff --git a/docs/website/docs/dlt-ecosystem/destinations/clickhouse.md b/docs/website/docs/dlt-ecosystem/destinations/clickhouse.md index 57f23db83b..0743dc3f35 100644 --- a/docs/website/docs/dlt-ecosystem/destinations/clickhouse.md +++ b/docs/website/docs/dlt-ecosystem/destinations/clickhouse.md @@ -68,7 +68,6 @@ To load data into ClickHouse, you need to create a ClickHouse database. While we secure = 1 # Set to 1 if using HTTPS, else 0. dataset_table_separator = "___" # Separator for dataset table names from dataset. ``` - 2. You can pass a database connection string similar to the one used by the `clickhouse-driver` library. The credentials above will look like this: ```toml @@ -92,7 +91,8 @@ Data is loaded into ClickHouse using the most efficient method depending on the `Clickhouse` does not support multiple datasets in one database, dlt relies on datasets to exist for multiple reasons. To make `clickhouse` work with `dlt`, tables generated by `dlt` in your `clickhouse` database will have their name prefixed with the dataset name separated by -the configurable `dataset_table_separator`. Additionally a special sentinel table that does not contain any data will also be created, so dlt knows which virtual datasets already exist in a clickhouse +the configurable `dataset_table_separator`. Additionally, a special sentinel table that does not contain any data will be created, so dlt knows which virtual datasets already exist in a +clickhouse destination. ## Supported file formats @@ -102,16 +102,16 @@ destination. 
The `clickhouse` destination has a few specific deviations from the default sql destinations: -1. `Clickhouse` has an experimental `object` datatype, but we have found it to be a bit unpredictable, so the dlt clickhouse destination will load the complex dataype to a `text` column. If you need -this feature, please get in touch in our slack community and we will consider adding it. +1. `Clickhouse` has an experimental `object` datatype, but we have found it to be a bit unpredictable, so the dlt clickhouse destination will load the complex datatype to a `text` column. If you need + this feature, get in touch with our Slack community, and we will consider adding it. 2. `Clickhouse` does not support the `time` datatype. Time will be loaded to a `text` column. -3. `Clickhouse` does not support the `binary` datatype. Binary will be loaded to a `text` column. When loading from `jsonl`, this will be a base64 string, when loading from parquet this will be -the `binary` object converted to `text`. -4. `Clickhouse` accepts adding columns to a populated table that are not null. +3. `Clickhouse` does not support the `binary` datatype. Binary will be loaded to a `text` column. When loading from `jsonl`, this will be a base64 string, when loading from parquet this will be + the `binary` object converted to `text`. +4. `Clickhouse` accepts adding columns to a populated table that are not null. ## Supported column hints -ClickHouse supports the following [column hints](https://dlthub.com/docs/general-usage/schema#tables-and-columns): +ClickHouse supports the following [column hints](../../general-usage/schema#tables-and-columns): - `primary_key` - marks the column as part of the primary key. Multiple columns can have this hint to create a composite primary key. @@ -122,10 +122,12 @@ By default, tables are created using the `ReplicatedMergeTree` table engine in C ```py from dlt.destinations.adapters import clickhouse_adapter + @dlt.resource() def my_resource(): ... + clickhouse_adapter(my_resource, table_engine_type="merge_tree") ``` @@ -158,9 +160,54 @@ pipeline = dlt.pipeline( ) ``` +### Using Google Cloud Storage as a Staging Area + +dlt supports using Google Cloud Storage (GCS) as a staging area when loading data into ClickHouse. This is handled automatically by +ClickHouse's [GCS table function](https://clickhouse.com/docs/en/sql-reference/table-functions/gcs) which dlt uses under the hood. + +Somewhat annoyingly, the GCS table function only supports authentication using Hash-based Message Authentication Code (HMAC) keys. To enable this, GCS provides an S3 compatibility mode that emulates the Amazon S3 +API. ClickHouse takes advantage of this to allow accessing GCS buckets via its S3 integration. + +To set up GCS staging with HMAC authentication in dlt: + +1. Create HMAC keys for your GCS service account by following the [Google Cloud guide](https://cloud.google.com/storage/docs/authentication/managing-hmackeys#create). + +2. 
Configure the HMAC keys as well as the `client_email`, `project_id` and `private_key` for your service account in your dlt project's ClickHouse destination settings in `config.toml`: + +```toml +[destination.filesystem] +bucket_url = "gs://dlt-ci" + +[destination.filesystem.credentials] +project_id = "a-cool-project" +client_email = "my-service-account@a-cool-project.iam.gserviceaccount.com" +private_key = "-----BEGIN PRIVATE KEY-----\nMIIEvQIBADANBgkaslkdjflasjnkdcopauihj...wEiEx7y+mx\nNffxQBqVVej2n/D93xY99pM=\n-----END PRIVATE KEY-----\n" + +[destination.clickhouse.credentials] +database = "dlt" +username = "dlt" +password = "Dlt*12345789234567" +host = "localhost" +port = 9440 +secure = 1 +gcp_access_key_id = "JFJ$$*f2058024835jFffsadf" +gcp_secret_access_key = "DFJdwslf2hf57)%$02jaflsedjfasoi" +``` + +Note: In addition to the HMAC keys (`gcp_access_key_id` and `gcp_secret_access_key`), you now need to provide the `client_email`, `project_id` and `private_key` for your service account +under `[destination.filesystem.credentials]`. +This is because the GCS staging support is now implemented as a temporary workaround and is still unoptimized. + +dlt will pass these credentials to ClickHouse which will handle the authentication and GCS access. + +There is active work in progress to simplify and improve the GCS staging setup for the ClickHouse dlt destination in the future. Proper GCS staging support is being tracked in these GitHub issues: + +- [Make filesystem destination work with gcs in s3 compatibility mode](https://github.com/dlt-hub/dlt/issues/1272) +- [GCS staging area support](https://github.com/dlt-hub/dlt/issues/1181) + ### dbt support -Integration with [dbt](../transformations/dbt/dbt.md) is generally supported via dbt-clickhouse, but not tested by us at this time. +Integration with [dbt](../transformations/dbt/dbt.md) is generally supported via dbt-clickhouse, but not tested by us. ### Syncing of `dlt` state From 1c8e4ef8c173ca0f896dd10419bb3d297950783f Mon Sep 17 00:00:00 2001 From: Marcel Coetzee Date: Wed, 24 Apr 2024 18:21:47 +0200 Subject: [PATCH 122/127] Add `http_port` to docs #1055 Signed-off-by: Marcel Coetzee --- .../dlt-ecosystem/destinations/clickhouse.md | 18 ++++++++++++++++-- 1 file changed, 16 insertions(+), 2 deletions(-) diff --git a/docs/website/docs/dlt-ecosystem/destinations/clickhouse.md b/docs/website/docs/dlt-ecosystem/destinations/clickhouse.md index 0743dc3f35..01605e7710 100644 --- a/docs/website/docs/dlt-ecosystem/destinations/clickhouse.md +++ b/docs/website/docs/dlt-ecosystem/destinations/clickhouse.md @@ -64,10 +64,23 @@ To load data into ClickHouse, you need to create a ClickHouse database. While we password = "Dlt*12345789234567" # ClickHouse password if any host = "localhost" # ClickHouse server host port = 9000 # ClickHouse HTTP port, default is 9000 - http_port = 8443 # HTTP Port to connect to ClickHouse server's HTTP interface. + http_port = 8443 # HTTP Port to connect to ClickHouse server's HTTP interface. Defaults to 8443. secure = 1 # Set to 1 if using HTTPS, else 0. dataset_table_separator = "___" # Separator for dataset table names from dataset. ``` + + :::info http_port + The `http_port` parameter specifies the port number to use when connecting to the ClickHouse server's HTTP interface. This is different from default port 9000, which is used for the native TCP + protocol. + + You must set `http_port` if you are not using external staging (i.e. you don't set the staging parameter in your pipeline). 
This is because dlt's built-in ClickHouse local storage staging uses the + [clickhouse-connect](https://github.com/ClickHouse/clickhouse-connect) library, which communicates with ClickHouse over HTTP. + + Make sure your ClickHouse server is configured to accept HTTP connections on the port specified by `http_port`. For example, if you set `http_port = 8443`, then ClickHouse should be listening for + HTTP + requests on port 8443. If you are using external staging, you can omit the `http_port` parameter, since clickhouse-connect will not be used in this case. + ::: + 2. You can pass a database connection string similar to the one used by the `clickhouse-driver` library. The credentials above will look like this: ```toml @@ -165,7 +178,8 @@ pipeline = dlt.pipeline( dlt supports using Google Cloud Storage (GCS) as a staging area when loading data into ClickHouse. This is handled automatically by ClickHouse's [GCS table function](https://clickhouse.com/docs/en/sql-reference/table-functions/gcs) which dlt uses under the hood. -Somewhat annoyingly, the GCS table function only supports authentication using Hash-based Message Authentication Code (HMAC) keys. To enable this, GCS provides an S3 compatibility mode that emulates the Amazon S3 +Somewhat annoyingly, the GCS table function only supports authentication using Hash-based Message Authentication Code (HMAC) keys. To enable this, GCS provides an S3 compatibility mode that emulates +the Amazon S3 API. ClickHouse takes advantage of this to allow accessing GCS buckets via its S3 integration. To set up GCS staging with HMAC authentication in dlt: From a8887c41d84faceb7938534159ed5ed0133aaaa8 Mon Sep 17 00:00:00 2001 From: Dave Date: Thu, 25 Apr 2024 11:04:15 +0200 Subject: [PATCH 123/127] fix import after merge --- tests/load/pipeline/test_clickhouse.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/load/pipeline/test_clickhouse.py b/tests/load/pipeline/test_clickhouse.py index 9d6c6ed8d7..2ba5cfdcb8 100644 --- a/tests/load/pipeline/test_clickhouse.py +++ b/tests/load/pipeline/test_clickhouse.py @@ -8,8 +8,8 @@ from tests.load.pipeline.utils import ( destinations_configs, DestinationTestConfiguration, - load_table_counts, ) +from tests.pipeline.utils import load_table_counts @pytest.mark.parametrize( From 2c21e1938bd4e4b983d116cfff8e58d9e7fc9126 Mon Sep 17 00:00:00 2001 From: Dave Date: Thu, 25 Apr 2024 11:06:54 +0200 Subject: [PATCH 124/127] small changes to the docs --- docs/website/docs/dlt-ecosystem/destinations/clickhouse.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/website/docs/dlt-ecosystem/destinations/clickhouse.md b/docs/website/docs/dlt-ecosystem/destinations/clickhouse.md index 01605e7710..ea187e54eb 100644 --- a/docs/website/docs/dlt-ecosystem/destinations/clickhouse.md +++ b/docs/website/docs/dlt-ecosystem/destinations/clickhouse.md @@ -115,12 +115,12 @@ destination. The `clickhouse` destination has a few specific deviations from the default sql destinations: -1. `Clickhouse` has an experimental `object` datatype, but we have found it to be a bit unpredictable, so the dlt clickhouse destination will load the complex datatype to a `text` column. If you need - this feature, get in touch with our Slack community, and we will consider adding it. +1. `Clickhouse` has an experimental `object` datatype, but we have found it to be a bit unpredictable, so the dlt clickhouse destination will load the complex datatype to a `text` column. 
If you need this feature, get in touch with our Slack community, and we will consider adding it. 2. `Clickhouse` does not support the `time` datatype. Time will be loaded to a `text` column. 3. `Clickhouse` does not support the `binary` datatype. Binary will be loaded to a `text` column. When loading from `jsonl`, this will be a base64 string, when loading from parquet this will be the `binary` object converted to `text`. 4. `Clickhouse` accepts adding columns to a populated table that are not null. +5. `Clickhouse` can produce rounding errors under certain conditions when using the float / double datatype. Make sure to use the decimal datatype if you cannot afford rounding errors. For example, loading the value 12.7001 into a double column with the `jsonl` loader file format will predictably produce a rounding error. ## Supported column hints @@ -178,7 +178,7 @@ pipeline = dlt.pipeline( dlt supports using Google Cloud Storage (GCS) as a staging area when loading data into ClickHouse. This is handled automatically by ClickHouse's [GCS table function](https://clickhouse.com/docs/en/sql-reference/table-functions/gcs) which dlt uses under the hood. -Somewhat annoyingly, the GCS table function only supports authentication using Hash-based Message Authentication Code (HMAC) keys. To enable this, GCS provides an S3 compatibility mode that emulates +The ClickHouse GCS table function only supports authentication using Hash-based Message Authentication Code (HMAC) keys. To enable this, GCS provides an S3 compatibility mode that emulates the Amazon S3 API. ClickHouse takes advantage of this to allow accessing GCS buckets via its S3 integration. To set up GCS staging with HMAC authentication in dlt: From 76279453e1ce14839ad9ec343ec5d67a46dfa006 Mon Sep 17 00:00:00 2001 From: Dave Date: Thu, 25 Apr 2024 11:20:09 +0200 Subject: [PATCH 125/127] tolerate rounding errors when loading from jsonl --- tests/cases.py | 2 +- tests/load/pipeline/test_arrow_loading.py | 7 +++++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/tests/cases.py b/tests/cases.py index 15e3fef091..2b655fdc8b 100644 --- a/tests/cases.py +++ b/tests/cases.py @@ -311,7 +311,7 @@ def arrow_table_all_data_types( "datetime": pd.date_range("2021-01-01T01:02:03.1234", periods=num_rows, tz=tz, unit="us"), "bool": [random.choice([True, False]) for _ in range(num_rows)], "string_null": [random.choice(ascii_lowercase) for _ in range(num_rows - 1)] + [None], - "float_null": [round(random.uniform(0, 100), 5) for _ in range(num_rows - 1)] + [ + "float_null": [round(random.uniform(0, 100), 4) for _ in range(num_rows - 1)] + [ None ], # decrease precision "null": pd.Series([None for _ in range(num_rows)]), diff --git a/tests/load/pipeline/test_arrow_loading.py b/tests/load/pipeline/test_arrow_loading.py index 76e13c5293..c5a37ee5bb 100644 --- a/tests/load/pipeline/test_arrow_loading.py +++ b/tests/load/pipeline/test_arrow_loading.py @@ -125,6 +125,13 @@ def some_data(): for i in range(len(row)): if isinstance(row[i], datetime): row[i] = pendulum.instance(row[i]) + # clickhouse produces rounding errors on double with jsonl, so we round the result coming from there + if ( + destination_config.destination == "clickhouse" + and destination_config.file_format == "jsonl" + and isinstance(row[i], float) + ): + row[i] = round(row[i], 4) expected = sorted([list(r.values()) for r in records]) From 13f4b1cfa7b27302726fb5274a0d53c89ddc63c3 Mon Sep 17 00:00:00 2001 From: Dave Date: Thu, 25 Apr 2024 15:08:35 +0200 Subject: [PATCH 126/127] post devel merge fix --- dlt/common/destination/capabilities.py | 1 -
dlt/destinations/impl/clickhouse/__init__.py | 1 - tests/load/pipeline/test_scd2.py | 2 +- 3 files changed, 1 insertion(+), 3 deletions(-) diff --git a/dlt/common/destination/capabilities.py b/dlt/common/destination/capabilities.py index 9c633e6a20..7ee1cb5695 100644 --- a/dlt/common/destination/capabilities.py +++ b/dlt/common/destination/capabilities.py @@ -54,7 +54,6 @@ class DestinationCapabilitiesContext(ContainerInjectableContext): insert_values_writer_type: str = "default" supports_multiple_statements: bool = True supports_clone_table: bool = False - scd2_high_timestamp: pendulum.DateTime = HIGH_TS """High timestamp used to indicate active records in `scd2` merge strategy.""" """Destination supports CREATE TABLE ... CLONE ... statements""" diff --git a/dlt/destinations/impl/clickhouse/__init__.py b/dlt/destinations/impl/clickhouse/__init__.py index 91cff79d53..bead136828 100644 --- a/dlt/destinations/impl/clickhouse/__init__.py +++ b/dlt/destinations/impl/clickhouse/__init__.py @@ -25,7 +25,6 @@ def capabilities() -> DestinationCapabilitiesContext: # https://stackoverflow.com/questions/68358686/what-is-the-maximum-length-of-a-column-in-clickhouse-can-it-be-modified caps.max_identifier_length = 255 caps.max_column_identifier_length = 255 - caps.scd2_high_timestamp = pendulum.datetime(2299, 12, 31) # this is the max datetime... # ClickHouse has no max `String` type length. caps.max_text_data_type_length = sys.maxsize diff --git a/tests/load/pipeline/test_scd2.py b/tests/load/pipeline/test_scd2.py index d5d76580d7..e8baa33ff3 100644 --- a/tests/load/pipeline/test_scd2.py +++ b/tests/load/pipeline/test_scd2.py @@ -81,7 +81,7 @@ def assert_records_as_set(actual: List[Dict[str, Any]], expected: List[Dict[str, for dconf in destinations_configs(default_sql_configs=True, supports_merge=True) ] + [ - (dconf, True, None, pendulum.DateTime(3234, 12, 31, 22, 2, 59)) # arbitrary timestamp + (dconf, True, None, pendulum.DateTime(2099, 12, 31, 22, 2, 59)) # arbitrary timestamp for dconf in destinations_configs(default_sql_configs=True, supports_merge=True) ] + [ # test nested columns and validity column name configuration only for postgres and duckdb From 244ed77099504c49a8a1f146415be57aa88197e6 Mon Sep 17 00:00:00 2001 From: Dave Date: Fri, 26 Apr 2024 10:36:50 +0200 Subject: [PATCH 127/127] remove unneeded stuff from scd2 merge --- dlt/common/destination/capabilities.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/dlt/common/destination/capabilities.py b/dlt/common/destination/capabilities.py index 7ee1cb5695..e74f5a980d 100644 --- a/dlt/common/destination/capabilities.py +++ b/dlt/common/destination/capabilities.py @@ -21,7 +21,6 @@ # sql - any sql statement TLoaderFileFormat = Literal["jsonl", "typed-jsonl", "insert_values", "parquet", "csv"] ALL_SUPPORTED_FILE_FORMATS: Set[TLoaderFileFormat] = set(get_args(TLoaderFileFormat)) -HIGH_TS = pendulum.datetime(9999, 12, 31) @configspec @@ -54,7 +53,6 @@ class DestinationCapabilitiesContext(ContainerInjectableContext): insert_values_writer_type: str = "default" supports_multiple_statements: bool = True supports_clone_table: bool = False - """High timestamp used to indicate active records in `scd2` merge strategy.""" """Destination supports CREATE TABLE ... CLONE ... statements""" max_table_nesting: Optional[int] = None # destination can overwrite max table nesting