From d992e4fb6012d5a3995a919e5de6e6f8fced5238 Mon Sep 17 00:00:00 2001 From: Gabriel Date: Thu, 3 Oct 2024 14:51:56 -0400 Subject: [PATCH] [MAINTENANCE] Remove Python 3.8 Support (EOL) (#10441) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Tyler Hoffman --- .github/workflows/ci.yml | 51 ++++++------------- ci/azure-pipelines-cloud-integration.yml | 6 +-- ci/azure-pipelines-contrib.yml | 2 +- ci/azure-pipelines-docs-integration.yml | 4 +- ci/azure-pipelines-os-integration.yml | 8 +-- ...ure-pipelines-sqlalchemy-compatibility.yml | 22 +++++--- ci/constraints-test/py37-min-install.txt | 2 - ci/constraints-test/py38-min-install.txt | 3 -- ci/dev-install-matrix.yml | 2 +- ci/user-install-matrix.yml | 2 +- contrib/experimental/setup.py | 4 +- docker/Dockerfile.tests | 4 +- .../set_up_a_gx_environment/install_python.md | 2 +- ...-pipelines-manual-staging-json-to-prod.yml | 2 +- .../compatibility/typing_extensions.py | 7 ++- .../datasource/fluent/sources.py | 2 +- .../pandas_data_sampler.py | 2 +- .../expectations/expectation.py | 4 +- pyproject.toml | 5 +- requirements.txt | 2 - setup.cfg | 2 +- setup.py | 6 ++- tests/analytics/test_analytics.py | 18 ++++--- tests/checkpoint/test_checkpoint.py | 38 ++++++++------ tests/checkpoint/test_checkpoint_id_pk.py | 5 +- tests/conftest.py | 24 +++++---- .../test_datasource_store_cloud_backend.py | 11 ++-- .../test_data_context_data_docs_api.py | 5 +- tests/datasource/fluent/test_batch.py | 16 +++--- tests/datasource/fluent/test_schemas.py | 2 +- tests/integration/spark/test_spark_connect.py | 3 +- tests/validator/test_v1_validator.py | 8 +-- tests/validator/test_validation_graph.py | 36 +++++++------ tests/validator/test_validator.py | 2 +- 34 files changed, 158 insertions(+), 154 deletions(-) delete mode 100644 ci/constraints-test/py37-min-install.txt delete mode 100644 ci/constraints-test/py38-min-install.txt diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 1f1d4dd480a1..4a497f4835f7 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -62,7 +62,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v5 with: - python-version: "3.8" + python-version: "3.9" cache: "pip" cache-dependency-path: | requirements-types.txt @@ -130,7 +130,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v5 with: - python-version: "3.8" + python-version: "3.9" - name: Run docs_snippet_checker run: | yarn install @@ -161,7 +161,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v5 with: - python-version: "3.8" + python-version: "3.9" cache: "pip" cache-dependency-path: | reqs/requirements-dev-test.txt @@ -201,7 +201,7 @@ jobs: strategy: matrix: os: [ubuntu-latest] - python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] + python-version: ["3.9", "3.10", "3.11", "3.12"] runs-on: ${{ matrix.os }} @@ -253,7 +253,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v5 with: - python-version: "3.8" + python-version: "3.9" cache: "pip" cache-dependency-path: | reqs/requirements-dev-test.txt @@ -292,7 +292,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v5 with: - python-version: "3.8" + python-version: "3.9" cache: "pip" cache-dependency-path: reqs/requirements-dev-test.txt - name: Install dependencies @@ -344,7 +344,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v5 with: - python-version: "3.8" + python-version: "3.9" cache-dependency-path: | reqs/requirements-dev-test.txt setup.py @@ -435,15 +435,13 @@ jobs: - spark - 
spark_connect - trino - python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] + python-version: ["3.9", "3.10", "3.11", "3.12"] exclude: # TODO: would like to adopt `actionlint` pre-commit hook # but false positive here and inability to do an inline ignore # prevents this https://github.com/rhysd/actionlint/issues/237 - - python-version: ${{ github.event_name == 'pull_request' && '3.9' }} - python-version: ${{ github.event_name == 'pull_request' && '3.10' }} - python-version: ${{ github.event_name == 'pull_request' && '3.11' }} - - python-version: ${{ github.event_name == 'merge_group' && '3.9' }} - python-version: ${{ github.event_name == 'merge_group' && '3.10' }} - python-version: ${{ github.event_name == 'merge_group' && '3.11' }} - python-version: "3.12" # need sqlalchemy 2.0 support @@ -500,24 +498,6 @@ jobs: token: ${{ secrets.CODECOV_TOKEN }} flags: "${{ matrix.python-version }} ${{ matrix.markers }}" - py38-min-versions: - needs: [unit-tests, static-analysis] - if: github.event.pull_request.draft == false - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: actions/checkout@v4 - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: "3.8" - cache: "pip" - cache-dependency-path: reqs/requirements-dev-test.txt - - name: Install dependencies - run: pip install . -c ci/constraints-test/py38-min-install.txt -r reqs/requirements-dev-test.txt - - name: Run the tests - run: invoke ci-tests -m unit --xdist --slowest=10 --timeout=2.0 - py39-min-versions: needs: [unit-tests, static-analysis] if: github.event.pull_request.draft == false @@ -602,7 +582,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v5 with: - python-version: "3.8" + python-version: "3.9" cache: "pip" cache-dependency-path: reqs/requirements-dev-test.txt - name: Install dependencies @@ -619,7 +599,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v5 with: - python-version: "3.8" + python-version: "3.9" cache: "pip" cache-dependency-path: requirements.txt - name: Install dependencies @@ -632,11 +612,12 @@ jobs: needs: [static-analysis] if: github.event.pull_request.draft == false runs-on: ubuntu-latest + env: + GX_PYTHON_EXPERIMENTAL: true # allow for python 3.12+ strategy: matrix: - python-version: ["3.8", "3.9", "3.10", "3.11"] + python-version: ["3.9", "3.10", "3.11", "3.12"] exclude: - - python-version: ${{ github.event_name == 'pull_request' && '3.9' }} - python-version: ${{ github.event_name == 'pull_request' && '3.10' }} steps: - name: Checkout @@ -662,7 +643,6 @@ jobs: unit-tests, cloud-tests, marker-tests, - py38-min-versions, py39-min-versions, py310-min-versions, py311-min-versions, @@ -681,10 +661,10 @@ jobs: id-token: write steps: - uses: actions/checkout@master - - name: Set up Python 3.8 + - name: Set up Python 3.9 uses: actions/setup-python@v5 with: - python-version: "3.8" + python-version: "3.9" - name: Update pip run: python -m pip install --upgrade pip - name: Install Twine and Wheel, and prepare packaging @@ -711,7 +691,6 @@ jobs: unit-tests, cloud-tests, marker-tests, - py38-min-versions, py39-min-versions, py310-min-versions, py311-min-versions, diff --git a/ci/azure-pipelines-cloud-integration.yml b/ci/azure-pipelines-cloud-integration.yml index cf04058c2e3e..b1aedc5bd03a 100644 --- a/ci/azure-pipelines-cloud-integration.yml +++ b/ci/azure-pipelines-cloud-integration.yml @@ -7,7 +7,7 @@ stages: - job: bigquery_performance_test timeoutInMinutes: 30 # this should be more than sufficient since the performance typically runs < 5 min variables: - 
python.version: "3.8" + python.version: "3.9" strategy: matrix: @@ -67,7 +67,7 @@ stages: - job: bigquery_expectations_test timeoutInMinutes: 150 # Each stage runs in about 60 min and 30 min respectively. variables: - python.version: "3.8" + python.version: "3.9" strategy: matrix: @@ -114,7 +114,7 @@ stages: - job: snowflake_expectations_test timeoutInMinutes: 45 # snowflake tests will run in about 30 min variables: - python.version: "3.8" + python.version: "3.9" steps: - task: UsePythonVersion@0 diff --git a/ci/azure-pipelines-contrib.yml b/ci/azure-pipelines-contrib.yml index af41069a1c68..9a070acdbade 100644 --- a/ci/azure-pipelines-contrib.yml +++ b/ci/azure-pipelines-contrib.yml @@ -5,7 +5,7 @@ variables: contrib_major: 0 contrib_minor: 1 isDevelop: $[eq(variables['Build.SourceBranch'], 'refs/heads/develop')] - python.version: "3.8" + python.version: "3.9" name: $(Date:yyyyMMdd)$(Rev:rrr) diff --git a/ci/azure-pipelines-docs-integration.yml b/ci/azure-pipelines-docs-integration.yml index 8462ec2e32bf..c0d5bbf62e5e 100644 --- a/ci/azure-pipelines-docs-integration.yml +++ b/ci/azure-pipelines-docs-integration.yml @@ -23,7 +23,7 @@ stages: timeoutInMinutes: 30 condition: and(or(eq(variables.isDevelop, true), eq(variables.isManual, true)), ne(variables['SYSTEM.PULLREQUEST.ISFORK'], true)) variables: - python.version: "3.8" + python.version: "3.9" spark.version: "3.3.2" matching_aws_java_sdk_bundle_version: "1.11.1026" @@ -61,7 +61,7 @@ stages: timeoutInMinutes: 30 condition: or(eq(variables.isDevelop, true), eq(variables.isManual, true)) variables: - python.version: "3.8" + python.version: "3.9" steps: - task: UsePythonVersion@0 diff --git a/ci/azure-pipelines-os-integration.yml b/ci/azure-pipelines-os-integration.yml index 766659746634..3e241b3af0c6 100644 --- a/ci/azure-pipelines-os-integration.yml +++ b/ci/azure-pipelines-os-integration.yml @@ -129,7 +129,7 @@ stages: mysql: mysql variables: - python.version: "3.8" + python.version: "3.9" steps: - task: UsePythonVersion@0 @@ -170,7 +170,7 @@ stages: mssql: mssql variables: - python.version: "3.8" + python.version: "3.9" steps: - task: UsePythonVersion@0 @@ -201,7 +201,7 @@ stages: postgres: postgres variables: - python.version: "3.8" + python.version: "3.9" steps: - task: UsePythonVersion@0 @@ -229,7 +229,7 @@ stages: trino: trino variables: - python.version: "3.8" + python.version: "3.9" steps: - task: UsePythonVersion@0 diff --git a/ci/azure-pipelines-sqlalchemy-compatibility.yml b/ci/azure-pipelines-sqlalchemy-compatibility.yml index 6bf031c2843b..dda083e8e6a4 100644 --- a/ci/azure-pipelines-sqlalchemy-compatibility.yml +++ b/ci/azure-pipelines-sqlalchemy-compatibility.yml @@ -71,8 +71,8 @@ stages: steps: - task: UsePythonVersion@0 inputs: - versionSpec: 3.8 - displayName: "Use Python 3.8" + versionSpec: 3.9 + displayName: "Use Python 3.9" - script: | pip install $(grep -E '^(black|invoke|ruff)' reqs/requirements-dev-contrib.txt) @@ -92,8 +92,8 @@ stages: steps: - task: UsePythonVersion@0 inputs: - versionSpec: "3.8" - displayName: "Use Python 3.8" + versionSpec: "3.9" + displayName: "Use Python 3.9" - script: | pip install . 
@@ -128,8 +128,8 @@ stages: steps: - task: UsePythonVersion@0 inputs: - versionSpec: "3.8" - displayName: "Use Python 3.8" + versionSpec: "3.9" + displayName: "Use Python 3.9" - bash: python -m pip install --upgrade pip displayName: "Update pip" @@ -139,6 +139,9 @@ stages: echo "SQLAlchemy$(comparison)2.0.0" >> constraints-dev-temp.txt echo "pandas$(comparison)2.0.0" >> constraints-dev-temp.txt + # Constrain numpy to prevent running into a number of DeprecationWarnings + echo "numpy<1.25" >> constraints-dev-temp.txt + pip install --constraint constraints-dev-temp.txt ".[test, postgresql]" pytest-azurepipelines displayName: "Install dependencies using SQLAlchemy base version $(sqlalchemy_base_version)" @@ -182,8 +185,8 @@ stages: steps: - task: UsePythonVersion@0 inputs: - versionSpec: "3.8" - displayName: "Use Python 3.8" + versionSpec: "3.9" + displayName: "Use Python 3.9" - bash: python -m pip install --upgrade pip displayName: "Update pip" @@ -196,6 +199,9 @@ stages: echo "SQLAlchemy$(comparison)2.0.0" >> constraints-dev-temp.txt echo "pandas$(comparison)2.0.0" >> constraints-dev-temp.txt + # Constrain numpy to prevent running into a number of DeprecationWarnings + echo "numpy<1.25" >> constraints-dev-temp.txt + pip install --constraint constraints-dev-temp.txt ".[test, mssql]" pytest-azurepipelines displayName: "Install dependencies using SQLAlchemy base version $(sqlalchemy_base_version)" diff --git a/ci/constraints-test/py37-min-install.txt b/ci/constraints-test/py37-min-install.txt deleted file mode 100644 index 58001a34858d..000000000000 --- a/ci/constraints-test/py37-min-install.txt +++ /dev/null @@ -1,2 +0,0 @@ -numpy==1.18.5 -pandas==1.1.0 diff --git a/ci/constraints-test/py38-min-install.txt b/ci/constraints-test/py38-min-install.txt deleted file mode 100644 index adae5d305821..000000000000 --- a/ci/constraints-test/py38-min-install.txt +++ /dev/null @@ -1,3 +0,0 @@ -numpy==1.20.3 -pandas==1.1.0 -sqlalchemy<2.0.0 # SQLAlchemy 2.0.0 is not compatible with pandas < 2.0.0 diff --git a/ci/dev-install-matrix.yml b/ci/dev-install-matrix.yml index 008d17fb3230..ebaaef042cb0 100644 --- a/ci/dev-install-matrix.yml +++ b/ci/dev-install-matrix.yml @@ -1,6 +1,6 @@ parameters: vmImage: "" # Must be specified in primary YAML - pythonVersion: ["3.8", "3.9", "3.10", "3.11"] + pythonVersion: ["3.9", "3.10", "3.11"] jobs: - ${{ each pythonVersion in parameters.pythonVersion }}: diff --git a/ci/user-install-matrix.yml b/ci/user-install-matrix.yml index de6cda9b68ad..52bff637055f 100644 --- a/ci/user-install-matrix.yml +++ b/ci/user-install-matrix.yml @@ -1,6 +1,6 @@ parameters: vmImage: "" # Must be specified in primary YAML - pythonVersion: ["3.8", "3.9", "3.10", "3.11"] + pythonVersion: ["3.9", "3.10", "3.11"] jobs: - ${{ each pythonVersion in parameters.pythonVersion }}: diff --git a/contrib/experimental/setup.py b/contrib/experimental/setup.py index 099971a0453d..6240b3d438cf 100644 --- a/contrib/experimental/setup.py +++ b/contrib/experimental/setup.py @@ -32,9 +32,9 @@ "Topic :: Software Development :: Testing", "License :: OSI Approved :: Apache Software License", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", ], } diff --git a/docker/Dockerfile.tests b/docker/Dockerfile.tests index ca4edf632379..59d366e95add 100644 --- a/docker/Dockerfile.tests +++ b/docker/Dockerfile.tests @@ -1,5 
+1,5 @@ -# PYTHON_VERSION in ["3.7", "3.8", "3.9", "3.10"] -ARG PYTHON_VERSION=3.8 +# PYTHON_VERSION in ["3.9", "3.10"] +ARG PYTHON_VERSION=3.9 # SOURCE in ["local", "github"] ARG SOURCE=local diff --git a/docs/docusaurus/docs/core/set_up_a_gx_environment/install_python.md b/docs/docusaurus/docs/core/set_up_a_gx_environment/install_python.md index f9cd59b2a30a..87fe3052c099 100644 --- a/docs/docusaurus/docs/core/set_up_a_gx_environment/install_python.md +++ b/docs/docusaurus/docs/core/set_up_a_gx_environment/install_python.md @@ -31,7 +31,7 @@ To use Great Expectations (GX) you need to install Python and the GX Core Python You should receive a response similar to the following: ```shell title="Terminal output" - Python 3.8.6 + Python 3.9.19 ``` ## Optional. Create a virtual environment diff --git a/docs/expectation_gallery/azure-pipelines-manual-staging-json-to-prod.yml b/docs/expectation_gallery/azure-pipelines-manual-staging-json-to-prod.yml index c6c883c839fb..57564484329c 100644 --- a/docs/expectation_gallery/azure-pipelines-manual-staging-json-to-prod.yml +++ b/docs/expectation_gallery/azure-pipelines-manual-staging-json-to-prod.yml @@ -18,7 +18,7 @@ stages: condition: eq(variables.isManual, true) timeoutInMinutes: 15 variables: - python.version: "3.8" + python.version: "3.9" steps: - task: UsePythonVersion@0 diff --git a/great_expectations/compatibility/typing_extensions.py b/great_expectations/compatibility/typing_extensions.py index 21e03d8ad31a..b1d928ac5b3c 100644 --- a/great_expectations/compatibility/typing_extensions.py +++ b/great_expectations/compatibility/typing_extensions.py @@ -3,11 +3,10 @@ from typing import Any, Callable, TypeVar try: - from typing import ( # type: ignore[attr-defined] # only exists in some python versions - Annotated, - ) -except ImportError: + # default to the typing_extensions version if available as it contains bug fixes & improvements from typing_extensions import Annotated +except ImportError: + from typing import Annotated # type: ignore[assignment] try: from typing_extensions import override diff --git a/great_expectations/datasource/fluent/sources.py b/great_expectations/datasource/fluent/sources.py index 105402f69291..2c7c31e63830 100644 --- a/great_expectations/datasource/fluent/sources.py +++ b/great_expectations/datasource/fluent/sources.py @@ -128,7 +128,7 @@ def register_datasource(cls, ds_type: Type[Datasource]) -> None: ) # rollback type registrations if exception occurs - with cls.type_lookup.transaction() as ds_type_lookup, ds_type._type_lookup.transaction() as asset_type_lookup: # noqa: E501 + with cls.type_lookup.transaction() as ds_type_lookup, ds_type._type_lookup.transaction() as asset_type_lookup: # fmt: skip # noqa: E501 cls._register_assets(ds_type, asset_type_lookup=asset_type_lookup) cls._register_datasource( diff --git a/great_expectations/execution_engine/partition_and_sample/pandas_data_sampler.py b/great_expectations/execution_engine/partition_and_sample/pandas_data_sampler.py index 130055e0e446..25830e02e911 100644 --- a/great_expectations/execution_engine/partition_and_sample/pandas_data_sampler.py +++ b/great_expectations/execution_engine/partition_and_sample/pandas_data_sampler.py @@ -159,7 +159,7 @@ def sample_using_hash( ) ) - matches = df[column_name].map( + matches: pd.Series = df[column_name].map( lambda x: hash_func(str(x).encode()).hexdigest()[-1 * hash_digits :] == hash_value ) return df[matches] diff --git a/great_expectations/expectations/expectation.py b/great_expectations/expectations/expectation.py 
index 0c1923d57677..5f2fc0722da6 100644 --- a/great_expectations/expectations/expectation.py +++ b/great_expectations/expectations/expectation.py @@ -1826,7 +1826,7 @@ class ColumnMapExpectation(BatchExpectation, ABC): """ # noqa: E501 column: StrictStr = Field(min_length=1, description=COLUMN_DESCRIPTION) - mostly: Mostly = 1.0 + mostly: Mostly = 1.0 # type: ignore[assignment] # TODO: Fix in CORE-412 catch_exceptions: bool = True @@ -2092,7 +2092,7 @@ class ColumnPairMapExpectation(BatchExpectation, ABC): column_A: StrictStr = Field(min_length=1, description=COLUMN_A_DESCRIPTION) column_B: StrictStr = Field(min_length=1, description=COLUMN_B_DESCRIPTION) - mostly: Mostly = 1.0 + mostly: Mostly = 1.0 # type: ignore[assignment] # TODO: Fix in CORE-412 catch_exceptions: bool = True diff --git a/pyproject.toml b/pyproject.toml index 6067d0b140a3..b01ab4965434 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -5,7 +5,7 @@ requires = ["setuptools", "wheel"] # build-backend = "setuptools.build_meta" [tool.mypy] -python_version = "3.8" +python_version = "3.9" plugins = [ "pydantic.mypy", "sqlalchemy.ext.mypy.plugin" @@ -243,7 +243,7 @@ warn_required_dynamic_aliases = true warn_untyped_fields = true [tool.ruff] -target-version = "py38" +target-version = "py39" line-length = 100 lint.preview = true # This enables preview rules for specified rules e.g. NPY201 lint.explicit-preview-rules = true # https://docs.astral.sh/ruff/preview/#selecting-single-preview-rules @@ -317,6 +317,7 @@ lint.ignore = [ # https://beta.ruff.rs/docs/rules/#flake8-pyi-pyi "PYI053", # string-or-bytes-too-long - causes mypy to fail on some of our type stubs "PYI054", # numeric-literal-too-long - causes mypy to fail on some of our type stubs + "UP035", # TODO: remove once min version of pydantic supports using collections.abc types # https://beta.ruff.rs/docs/rules/#flake8-bugbear-b # TODO: enable these "B904", # raise-without-from-inside-except diff --git a/requirements.txt b/requirements.txt index 73d45bc164ba..19d414a0c6e7 100644 --- a/requirements.txt +++ b/requirements.txt @@ -7,12 +7,10 @@ jsonschema>=2.5.1 makefun>=1.7.0,<2 marshmallow>=3.7.1,<4.0.0 mistune>=0.8.4 -numpy>=1.20.3; python_version == "3.8" numpy>=1.21.6; python_version == "3.9" numpy>=1.22.4; python_version >= "3.10" numpy>=1.26.0; python_version >= "3.12" packaging -pandas>=1.1.0,<2.2; python_version <= "3.8" pandas>=1.1.3,<2.2; python_version == "3.9" pandas>=1.3.0,<2.2; python_version >= "3.10" pandas<2.2; python_version >= "3.12" diff --git a/setup.cfg b/setup.cfg index 789332ca6714..022d37ed2be6 100644 --- a/setup.cfg +++ b/setup.cfg @@ -12,4 +12,4 @@ tag_prefix = parentdir_prefix = great_expectations- [options] -python_requires = >=3.8 +python_requires = >=3.9 diff --git a/setup.py b/setup.py index abef16e9d5d6..d6d7093b18ae 100644 --- a/setup.py +++ b/setup.py @@ -8,7 +8,7 @@ import versioneer -SUPPORTED_PYTHON = ">=3.8,<3.12" +SUPPORTED_PYTHON = ">=3.9,<3.12" def get_python_requires() -> str: @@ -17,6 +17,9 @@ def get_python_requires() -> str: return a version with no upper-bound. """ if os.getenv("GX_PYTHON_EXPERIMENTAL"): + return ">=3.9" + elif os.getenv("NETLIFY"): + # Netlify only supports Python 3.8 (EOL 2024-10-31) and 2.7 (EOl 2020-01-01). 
return ">=3.8" return SUPPORTED_PYTHON @@ -131,7 +134,6 @@ def get_extras_require(): "Topic :: Software Development :: Testing", "License :: OSI Approved :: Apache Software License", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", diff --git a/tests/analytics/test_analytics.py b/tests/analytics/test_analytics.py index 5a9cbd71aadc..be7e582df7e7 100644 --- a/tests/analytics/test_analytics.py +++ b/tests/analytics/test_analytics.py @@ -78,9 +78,12 @@ def test_event_identifiers(analytics_config): def test_ephemeral_context_init(monkeypatch): monkeypatch.setattr(ENV_CONFIG, "gx_analytics_enabled", True) # Enable usage stats - with mock.patch( - "great_expectations.data_context.data_context.abstract_data_context.init_analytics" - ) as mock_init, mock.patch("posthog.capture") as mock_submit: + with ( + mock.patch( + "great_expectations.data_context.data_context.abstract_data_context.init_analytics" + ) as mock_init, + mock.patch("posthog.capture") as mock_submit, + ): _ = gx.get_context(mode="ephemeral") mock_init.assert_called_once_with( @@ -107,9 +110,12 @@ def test_ephemeral_context_init(monkeypatch): def test_cloud_context_init(cloud_api_fake, cloud_details, monkeypatch): monkeypatch.setattr(ENV_CONFIG, "gx_analytics_enabled", True) # Enable usage stats - with mock.patch( - "great_expectations.data_context.data_context.cloud_data_context.init_analytics" - ) as mock_init, mock.patch("posthog.capture") as mock_submit: + with ( + mock.patch( + "great_expectations.data_context.data_context.cloud_data_context.init_analytics" + ) as mock_init, + mock.patch("posthog.capture") as mock_submit, + ): _ = gx.get_context( cloud_access_token=cloud_details.access_token, cloud_organization_id=cloud_details.org_id, diff --git a/tests/checkpoint/test_checkpoint.py b/tests/checkpoint/test_checkpoint.py index 6c15ec6aaa99..beef9845ad8b 100644 --- a/tests/checkpoint/test_checkpoint.py +++ b/tests/checkpoint/test_checkpoint.py @@ -188,14 +188,17 @@ def validation_definition_1( data=mocker.Mock(spec=BatchDefinition), suite=mocker.Mock(spec=ExpectationSuite), ) - with mock.patch.object( - ValidationDefinition, - "json", - return_value=json.dumps({"id": str(uuid.uuid4()), "name": name}), - ), mock.patch.object( - ValidationDefinition, - "is_fresh", - return_value=ValidationDefinitionFreshnessDiagnostics(errors=[]), + with ( + mock.patch.object( + ValidationDefinition, + "json", + return_value=json.dumps({"id": str(uuid.uuid4()), "name": name}), + ), + mock.patch.object( + ValidationDefinition, + "is_fresh", + return_value=ValidationDefinitionFreshnessDiagnostics(errors=[]), + ), ): yield in_memory_context.validation_definitions.add(vc) @@ -209,14 +212,17 @@ def validation_definition_2( data=mocker.Mock(spec=BatchDefinition), suite=mocker.Mock(spec=ExpectationSuite), ) - with mock.patch.object( - ValidationDefinition, - "json", - return_value=json.dumps({"id": str(uuid.uuid4()), "name": name}), - ), mock.patch.object( - ValidationDefinition, - "is_fresh", - return_value=ValidationDefinitionFreshnessDiagnostics(errors=[]), + with ( + mock.patch.object( + ValidationDefinition, + "json", + return_value=json.dumps({"id": str(uuid.uuid4()), "name": name}), + ), + mock.patch.object( + ValidationDefinition, + "is_fresh", + return_value=ValidationDefinitionFreshnessDiagnostics(errors=[]), + ), ): yield in_memory_context.validation_definitions.add(vc) diff --git 
a/tests/checkpoint/test_checkpoint_id_pk.py b/tests/checkpoint/test_checkpoint_id_pk.py index dadbb3f7c224..1b9d4262d23c 100644 --- a/tests/checkpoint/test_checkpoint_id_pk.py +++ b/tests/checkpoint/test_checkpoint_id_pk.py @@ -81,13 +81,14 @@ def expected_sql_query_output() -> str: @pytest.fixture def expect_column_values_to_be_in_set() -> gxe.ExpectColumnValuesToBeInSet: - return gxe.ExpectColumnValuesToBeInSet(column="animals", value_set=["cat", "fish", "dog"]) + return gxe.ExpectColumnValuesToBeInSet(column="animals", value_set=["cat", "fish", "dog"]) # type: ignore[arg-type] # TODO: Fix in CORE-412 @pytest.fixture def expect_column_values_to_not_be_in_set() -> gxe.ExpectColumnValuesToNotBeInSet: return gxe.ExpectColumnValuesToNotBeInSet( - column="animals", value_set=["giraffe", "lion", "zebra"] + column="animals", + value_set=["giraffe", "lion", "zebra"], # type: ignore[arg-type] # TODO: Fix in CORE-412 ) diff --git a/tests/conftest.py b/tests/conftest.py index 6ea046a6bf5c..9ee7291b4331 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1805,16 +1805,20 @@ def mocked_config(*args, **kwargs) -> DataContextConfig: def mocked_get_cloud_config(*args, **kwargs) -> GXCloudConfig: return ge_cloud_config - with mock.patch( - "great_expectations.data_context.data_context.serializable_data_context.SerializableDataContext._save_project_config" - ), mock.patch( - "great_expectations.data_context.data_context.cloud_data_context.CloudDataContext.retrieve_data_context_config_from_cloud", - autospec=True, - side_effect=mocked_config, - ), mock.patch( - "great_expectations.data_context.data_context.CloudDataContext.get_cloud_config", - autospec=True, - side_effect=mocked_get_cloud_config, + with ( + mock.patch( + "great_expectations.data_context.data_context.serializable_data_context.SerializableDataContext._save_project_config" + ), + mock.patch( + "great_expectations.data_context.data_context.cloud_data_context.CloudDataContext.retrieve_data_context_config_from_cloud", + autospec=True, + side_effect=mocked_config, + ), + mock.patch( + "great_expectations.data_context.data_context.CloudDataContext.get_cloud_config", + autospec=True, + side_effect=mocked_get_cloud_config, + ), ): context = CloudDataContext(context_root_dir=project_path) diff --git a/tests/data_context/store/test_datasource_store_cloud_backend.py b/tests/data_context/store/test_datasource_store_cloud_backend.py index a338599d7627..4e21ecabba84 100644 --- a/tests/data_context/store/test_datasource_store_cloud_backend.py +++ b/tests/data_context/store/test_datasource_store_cloud_backend.py @@ -78,11 +78,12 @@ def mocked_response(*args, **kwargs): 200, ) - with mock.patch( - "requests.Session.get", autospec=True, side_effect=mocked_response - ) as mock_get, mock.patch( - "great_expectations.data_context.store.DatasourceStore.has_key", autospec=True - ) as mock_has_key: + with ( + mock.patch("requests.Session.get", autospec=True, side_effect=mocked_response) as mock_get, + mock.patch( + "great_expectations.data_context.store.DatasourceStore.has_key", autospec=True + ) as mock_has_key, + ): # Mocking has_key so that we don't try to connect to the cloud backend to verify key existence. 
# noqa: E501 mock_has_key.return_value = True diff --git a/tests/data_context/test_data_context_data_docs_api.py b/tests/data_context/test_data_context_data_docs_api.py index 12c268bdeb8e..5644e93a2c39 100644 --- a/tests/data_context/test_data_context_data_docs_api.py +++ b/tests/data_context/test_data_context_data_docs_api.py @@ -468,8 +468,9 @@ def test_view_validation_result( } checkpoint_result = mocker.Mock(spec=CheckpointResult, run_results=run_results) - with mock.patch("webbrowser.open") as mock_open, mock.patch( - "great_expectations.data_context.store.StoreBackend.has_key", return_value=True + with ( + mock.patch("webbrowser.open") as mock_open, + mock.patch("great_expectations.data_context.store.StoreBackend.has_key", return_value=True), ): context.view_validation_result(checkpoint_result) diff --git a/tests/datasource/fluent/test_batch.py b/tests/datasource/fluent/test_batch.py index 2931da387314..60946a3eaaf7 100644 --- a/tests/datasource/fluent/test_batch.py +++ b/tests/datasource/fluent/test_batch.py @@ -37,7 +37,7 @@ def test_batch_validate_expectation(pandas_setup: Tuple[AbstractDataContext, Bat # Make Expectation expectation = gxe.ExpectColumnValuesToNotBeNull( column="vendor_id", - mostly=0.95, + mostly=0.95, # type: ignore[arg-type] # TODO: Fix in CORE-412 ) # Validate result = batch.validate(expectation) @@ -56,7 +56,7 @@ def test_batch_validate_expectation_suite( suite.add_expectation( gxe.ExpectColumnValuesToNotBeNull( column="vendor_id", - mostly=0.95, + mostly=0.95, # type: ignore[arg-type] # TODO: Fix in CORE-412 ) ) # Validate @@ -129,7 +129,7 @@ def test_batch_validate_with_updated_expectation( # Asserts on result assert result.success is False # Update expectation and validate - expectation.mostly = 0.95 + expectation.mostly = 0.95 # type: ignore[assignment] # TODO: Fix in CORE-412 result = batch.validate(expectation) assert result.success is True @@ -152,7 +152,7 @@ def test_batch_validate_expectation_suite_with_updated_expectation( expectation = suite.expectations[0] assert isinstance(expectation, gxe.ExpectColumnValuesToNotBeNull) - expectation.mostly = 0.95 + expectation.mostly = 0.95 # type: ignore[assignment] # TODO: Fix in CORE-412 expectation.save() assert isinstance(suite.expectations[0], gxe.ExpectColumnValuesToNotBeNull) @@ -165,7 +165,7 @@ def test_batch_validate_expectation_suite_with_updated_expectation( class TestBatchValidateExpectation: @pytest.fixture def expectation(self) -> Expectation: - return gxe.ExpectColumnValuesToNotBeNull(column="vendor_id", mostly=0.95) + return gxe.ExpectColumnValuesToNotBeNull(column="vendor_id", mostly=0.95) # type: ignore[arg-type] # TODO: Fix in CORE-412 @pytest.mark.filesystem def test_boolean_validation_result( @@ -209,7 +209,7 @@ class TestBatchValidateExpectationSuite: def suite(self) -> ExpectationSuite: return gx.ExpectationSuite( name="my-suite", - expectations=[gxe.ExpectColumnValuesToNotBeNull(column="vendor_id", mostly=0.95)], + expectations=[gxe.ExpectColumnValuesToNotBeNull(column="vendor_id", mostly=0.95)], # type: ignore[arg-type] # TODO: Fix in CORE-412 ) @pytest.mark.filesystem @@ -260,7 +260,7 @@ def test_batch_validate_expectation_does_not_persist_a_batch_definition( expectation = gxe.ExpectColumnValuesToNotBeNull( column="vendor_id", - mostly=0.95, + mostly=0.95, # type: ignore[arg-type] # TODO: Fix in CORE-412 ) result = batch.validate(expectation) @@ -282,7 +282,7 @@ def test_batch_validate_expectation_suite_does_not_persist_a_batch_definition( expectations=[ 
gxe.ExpectColumnValuesToNotBeNull( column="vendor_id", - mostly=0.95, + mostly=0.95, # type: ignore[arg-type] # TODO: Fix in CORE-412 ) ], ) diff --git a/tests/datasource/fluent/test_schemas.py b/tests/datasource/fluent/test_schemas.py index 767ceaf969fa..20f443dea55b 100644 --- a/tests/datasource/fluent/test_schemas.py +++ b/tests/datasource/fluent/test_schemas.py @@ -25,7 +25,7 @@ def min_supported_python() -> Version: - return Version("3.8") + return Version("3.9") def _models_and_schema_dirs() -> ( diff --git a/tests/integration/spark/test_spark_connect.py b/tests/integration/spark/test_spark_connect.py index 2d1e9368a7f8..9aaf2339eaea 100644 --- a/tests/integration/spark/test_spark_connect.py +++ b/tests/integration/spark/test_spark_connect.py @@ -32,7 +32,8 @@ def spark_validation_definition( name="spark-connect-suite", expectations=[ gx.expectations.ExpectColumnValuesToBeInSet( - column="column", value_set=DATAFRAME_VALUES + column="column", + value_set=DATAFRAME_VALUES, # type: ignore[arg-type] # TODO: Fix in CORE-412 ), ], ) diff --git a/tests/validator/test_v1_validator.py b/tests/validator/test_v1_validator.py index 6f911c3f0da4..5e4e113d8a6d 100644 --- a/tests/validator/test_v1_validator.py +++ b/tests/validator/test_v1_validator.py @@ -22,7 +22,7 @@ def failing_expectation() -> Expectation: return gxe.ExpectColumnValuesToBeInSet( column="event_type", - value_set=["start", "stop"], + value_set=["start", "stop"], # type: ignore[arg-type] # TODO: Fix in CORE-412 ) @@ -168,7 +168,7 @@ def test_validate_expectation_with_batch_asset_options( result = validator.validate_expectation( gxe.ExpectColumnValuesToBeInSet( column="event_type", - value_set=[desired_event_type], + value_set=[desired_event_type], # type: ignore[arg-type] # TODO: Fix in CORE-412 ) ) print(f"Result dict ->\n{pf(result)}") @@ -225,7 +225,7 @@ def test_non_cloud_validate_does_not_render_results( expectations=[ gxe.ExpectColumnValuesToBeInSet( column="event_type", - value_set=["start"], + value_set=["start"], # type: ignore[arg-type] # TODO: Fix in CORE-412 ) ], ) @@ -252,7 +252,7 @@ def test_cloud_validate_renders_results_when_appropriate( expectations=[ gxe.ExpectColumnValuesToBeInSet( column="event_type", - value_set=["start"], + value_set=["start"], # type: ignore[arg-type] # TODO: Fix in CORE-412 ) ], ) diff --git a/tests/validator/test_validation_graph.py b/tests/validator/test_validation_graph.py index b0c625cbaf53..1aca3cb4610e 100644 --- a/tests/validator/test_validation_graph.py +++ b/tests/validator/test_validation_graph.py @@ -439,23 +439,27 @@ class DummyExecutionEngine: execution_engine = cast(ExecutionEngine, DummyExecutionEngine) # ValidationGraph is a complex object that requires len > 3 to not trigger tqdm - with mock.patch( - "great_expectations.validator.validation_graph.ValidationGraph._parse", - return_value=( - {}, - {}, + with ( + mock.patch( + "great_expectations.validator.validation_graph.ValidationGraph._parse", + return_value=( + {}, + {}, + ), + ), + mock.patch( + "great_expectations.validator.validation_graph.ValidationGraph.edges", + new_callable=mock.PropertyMock, + return_value=[ + MetricEdge(left=metric_configuration), + MetricEdge(left=metric_configuration), + MetricEdge(left=metric_configuration), + ], ), - ), mock.patch( - "great_expectations.validator.validation_graph.ValidationGraph.edges", - new_callable=mock.PropertyMock, - return_value=[ - MetricEdge(left=metric_configuration), - MetricEdge(left=metric_configuration), - MetricEdge(left=metric_configuration), - ], - ), 
mock.patch( - "great_expectations.validator.validation_graph.tqdm", - ) as mock_tqdm: + mock.patch( + "great_expectations.validator.validation_graph.tqdm", + ) as mock_tqdm, + ): call_args = { "runtime_configuration": None, } diff --git a/tests/validator/test_validator.py b/tests/validator/test_validator.py index 7cd24df1851d..8329b00c04dd 100644 --- a/tests/validator/test_validator.py +++ b/tests/validator/test_validator.py @@ -290,7 +290,7 @@ def test_graph_validate_with_runtime_config( }, expectation_config=gxe.ExpectColumnValueZScoresToBeLessThan( column="b", - mostly=1.0, + mostly=1.0, # type: ignore[arg-type] # TODO: Fix in CORE-412 threshold=2.0, double_sided=True, ).configuration,
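
Two patterns recur throughout this patch and are worth spelling out for anyone reading the diff in isolation.

Packaging floor: the setup.py and setup.cfg hunks raise the supported range to ">=3.9,<3.12", keeping the two environment-variable escape hatches that already existed in the file. A condensed, illustrative sketch of the resulting logic follows; the constant, function, and env-var names come from the hunks above, while the comments paraphrase the diff's own annotations ("# allow for python 3.12+" in ci.yml and the Netlify note in setup.py):

import os

SUPPORTED_PYTHON = ">=3.9,<3.12"


def get_python_requires() -> str:
    # GX_PYTHON_EXPERIMENTAL drops the upper bound so newer interpreters
    # (3.12+) can be exercised, matching the "allow for python 3.12+" env
    # flag added to ci.yml above.
    if os.getenv("GX_PYTHON_EXPERIMENTAL"):
        return ">=3.9"
    # NETLIFY keeps the old 3.8 floor because, per the comment added in the
    # setup.py hunk, Netlify's build image still only offers Python 3.8.
    elif os.getenv("NETLIFY"):
        return ">=3.8"
    return SUPPORTED_PYTHON

Once a release ships with this metadata, pip running under Python 3.8 resolves to the last published version whose python_requires still admits 3.8, rather than installing a build that is no longer tested against that interpreter.

Test churn: most of the test-file changes are mechanical rewrites of multi-item with statements into the parenthesized form, which is a SyntaxError under the 3.8 parser but is accepted from the 3.9 PEG parser onward (and documented as official grammar in 3.10), presumably unlocked by the ruff target-version bump to py39 in pyproject.toml. Roughly, as a self-contained sketch (os.getcwd and os.getpid are arbitrary stand-ins for the GX patch targets used in the real tests):

from unittest import mock

# pre-3.9-compatible form used before this patch
with mock.patch("os.getcwd") as mock_cwd, mock.patch("os.getpid") as mock_pid:
    ...

# parenthesized form adopted throughout the diff
with (
    mock.patch("os.getcwd") as mock_cwd,
    mock.patch("os.getpid") as mock_pid,
):
    ...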