diff --git a/.flake8 b/.flake8 deleted file mode 100644 index 9f31e4c..0000000 --- a/.flake8 +++ /dev/null @@ -1,2 +0,0 @@ -[flake8] -max-line-length = 79 \ No newline at end of file diff --git a/.github/workflows/linting.yml b/.github/workflows/linting.yml index d0c300d..be0760a 100644 --- a/.github/workflows/linting.yml +++ b/.github/workflows/linting.yml @@ -1,40 +1,28 @@ -name: linting +name: Check formatting and linting on: pull_request: push: { branches: [main] } jobs: - black: + ruff-check: + name: Run ruff lint and format checks runs-on: ubuntu-latest - strategy: - matrix: - python-version: ["3.9"] - steps: - - uses: actions/checkout@v4 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 - with: - python-version: ${{ matrix.python-version }} - - uses: psf/black@stable - with: - options: "--check --verbose --line-length 79" - src: "./src/sumo/wrapper" - flake8: - runs-on: ubuntu-latest - strategy: - matrix: - python-version: ["3.9"] + steps: - uses: actions/checkout@v4 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 + + - uses: actions/setup-python@v5 with: - python-version: ${{ matrix.python-version }} - - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install flake8 - - name: Analysing the code with flake8 - run: | - flake8 src/sumo/wrapper --config .flake8 + python-version: '3.11' + cache: 'pip' + + - name: Installing dependencies + run: pip install ruff + + - name: Run ruff lint + run: ruff check . + + - name: Run ruff format + run: ruff format . 
--check + diff --git a/.gitignore b/.gitignore index 1beebc1..a1ed3c9 100644 --- a/.gitignore +++ b/.gitignore @@ -12,4 +12,5 @@ build /src/testing.py /docs/_build /src/sumo/wrapper/version.py -*_version.py \ No newline at end of file +*_version.py +.vscode/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..d3db841 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,16 @@ +repos: +- repo: local + hooks: + - id: lint + name: Ruff Lint + description: Linting using ruff + entry: bash -c 'ruff check .' + language: system + stages: ["pre-commit", "pre-push"] + + - id: format + name: Ruff Format + description: Formatting using ruff + entry: bash -c 'ruff format . --check' + language: system + stages: ["pre-commit", "pre-push"] diff --git a/docs/conf.py b/docs/conf.py index a60c67e..2119725 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -55,4 +55,3 @@ # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". 
html_static_path = ["_static"] - diff --git a/pyproject.toml b/pyproject.toml index 266ab34..b01df9e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -5,10 +5,6 @@ build-backend = "setuptools.build_meta" [tool.setuptools_scm] version_file = "src/sumo/wrapper/_version.py" - -[tool.black] -line-length = 79 - [project] name = "sumo-wrapper-python" description = "Python wrapper for the Sumo API" @@ -39,6 +35,7 @@ docs = [ "sphinx-autodoc-typehints", "sphinxcontrib-apidoc", ] +dev = ["ruff", "pre-commit"] [project.urls] Repository = "https://github.com/equinor/sumo-wrapper-python" @@ -48,3 +45,32 @@ sumo_login = "sumo.wrapper.login:main" [tool.setuptools.packages.find] where = ["src"] + +[tool.ruff] +exclude = [ + ".env", + ".git", + ".github", + ".venv", + "venv", +] + +line-length = 79 + +[tool.ruff.lint] +ignore = [ + "E501", +] + +extend-select = [ + "C4", # Flake8-comprehensions + "I", # isort + "SIM", # Flake8-simplify + "TC", # Flake8-type-checking + "TID", # Flake8-tidy-imports + "N", # pep8-naming +] + +[tool.ruff.lint.per-file-ignores] +"__init__.py" = ["F401"] + diff --git a/src/sumo/wrapper/__init__.py b/src/sumo/wrapper/__init__.py index 844c7d2..c7714de 100644 --- a/src/sumo/wrapper/__init__.py +++ b/src/sumo/wrapper/__init__.py @@ -1,5 +1,5 @@ -from .sumo_client import SumoClient from ._retry_strategy import RetryStrategy +from .sumo_client import SumoClient try: from ._version import version diff --git a/src/sumo/wrapper/_auth_provider.py b/src/sumo/wrapper/_auth_provider.py index 00c12b4..20fcc2c 100644 --- a/src/sumo/wrapper/_auth_provider.py +++ b/src/sumo/wrapper/_auth_provider.py @@ -1,19 +1,19 @@ -import msal +import errno +import json import os -from datetime import datetime, timedelta import stat import sys -import json -import jwt import time -from azure.identity import ManagedIdentityCredential -import tenacity as tn -from ._retry_strategy import _log_retry_info, _return_last_value +from datetime import datetime, timedelta +import jwt 
+import msal +import tenacity as tn +from azure.identity import ManagedIdentityCredential from msal_extensions.persistence import FilePersistence from msal_extensions.token_cache import PersistedTokenCache -import errno +from ._retry_strategy import _log_retry_info, _return_last_value if not sys.platform.startswith("linux"): from msal_extensions import build_encrypted_persistence @@ -422,14 +422,12 @@ def get_auth_provider( return AuthProviderDeviceCode(client_id, authority, resource_id) # ELSE if all( - [ - os.getenv(x) - for x in [ - "AZURE_FEDERATED_TOKEN_FILE", - "AZURE_TENANT_ID", - "AZURE_CLIENT_ID", - "AZURE_AUTHORITY_HOST", - ] + os.getenv(x) + for x in [ + "AZURE_FEDERATED_TOKEN_FILE", + "AZURE_TENANT_ID", + "AZURE_CLIENT_ID", + "AZURE_AUTHORITY_HOST", ] ): return AuthProviderManaged(resource_id) diff --git a/src/sumo/wrapper/_logging.py b/src/sumo/wrapper/_logging.py index a28af21..788e0a1 100644 --- a/src/sumo/wrapper/_logging.py +++ b/src/sumo/wrapper/_logging.py @@ -3,14 +3,19 @@ class LogHandlerSumo(logging.Handler): - def __init__(self, sumoClient): + def __init__(self, sumo_client): logging.Handler.__init__(self) - self._sumoClient = sumoClient + self._sumoClient = sumo_client return def emit(self, record): try: - dt = datetime.utcnow().replace(microsecond=0).isoformat() + "Z" + dt = ( + datetime.utcnow() + .replace(microsecond=0) + .isoformat() + + "Z" + ) json = { "severity": record.levelname, "message": record.getMessage(), @@ -20,7 +25,7 @@ def emit(self, record): "funcname": record.funcName, "linenumber": record.lineno, } - if "objectUuid" in record.__dict__.keys(): + if "objectUuid" in record.__dict__: json["objectUuid"] = record.__dict__.get("objectUuid") self._sumoClient.post("/message-log/new", json=json) diff --git a/src/sumo/wrapper/_retry_strategy.py b/src/sumo/wrapper/_retry_strategy.py index a09fddf..7a2d5bc 100644 --- a/src/sumo/wrapper/_retry_strategy.py +++ b/src/sumo/wrapper/_retry_strategy.py @@ -1,5 +1,5 @@ 
-import tenacity as tn import httpx +import tenacity as tn def _log_retry_info(retry_state): diff --git a/src/sumo/wrapper/login.py b/src/sumo/wrapper/login.py index 2245d4b..8208e7d 100644 --- a/src/sumo/wrapper/login.py +++ b/src/sumo/wrapper/login.py @@ -1,9 +1,9 @@ import logging import platform -from pathlib import Path from argparse import ArgumentParser -from sumo.wrapper import SumoClient +from pathlib import Path +from sumo.wrapper import SumoClient logger = logging.getLogger("sumo.wrapper") logger.setLevel(level="CRITICAL") diff --git a/src/sumo/wrapper/sumo_client.py b/src/sumo/wrapper/sumo_client.py index ed4c205..fb8c59c 100644 --- a/src/sumo/wrapper/sumo_client.py +++ b/src/sumo/wrapper/sumo_client.py @@ -1,20 +1,20 @@ -import logging import asyncio +import contextlib +import logging +import re + import httpx import jwt -import re -from ._blob_client import BlobClient -from ._logging import LogHandlerSumo from ._auth_provider import get_auth_provider -from .config import APP_REGISTRATION, TENANT_ID, AUTHORITY_HOST_URI - +from ._blob_client import BlobClient from ._decorators import ( raise_for_status, raise_for_status_async, ) - +from ._logging import LogHandlerSumo from ._retry_strategy import RetryStrategy +from .config import APP_REGISTRATION, AUTHORITY_HOST_URI, TENANT_ID logger = logging.getLogger("sumo.wrapper") @@ -61,12 +61,10 @@ def __init__( logger.debug("Token provided") payload = None - try: + with contextlib.suppress(jwt.InvalidTokenError): payload = jwt.decode( token, options={"verify_signature": False} ) - except jwt.InvalidTokenError: - pass if payload: logger.debug(f"Token decoded as JWT, payload: {payload}") @@ -380,7 +378,7 @@ def _delete(): return retryer(_delete) - def getLogger(self, name): + def get_logger(self, name): """Gets a logger object that sends log objects into the message_log index for the Sumo instance. 
diff --git a/conftest.py b/tests/conftest.py similarity index 76% rename from conftest.py rename to tests/conftest.py index ce6f27c..0a9d23c 100644 --- a/conftest.py +++ b/tests/conftest.py @@ -1,5 +1,7 @@ import os +from sumo.wrapper import SumoClient + def pytest_addoption(parser): parser.addoption("--token", action="store", default="") @@ -10,5 +12,8 @@ def pytest_generate_tests(metafunc): token = os.environ.get("ACCESS_TOKEN") token = token if token and len(token) > 0 else None + if token is None: + _ = SumoClient(env="dev", interactive=True) + if "token" in metafunc.fixturenames: metafunc.parametrize("token", [token]) diff --git a/tests/test_sumo_thin_client.py b/tests/test_sumo_thin_client.py index 2db3619..8f3201a 100644 --- a/tests/test_sumo_thin_client.py +++ b/tests/test_sumo_thin_client.py @@ -1,32 +1,28 @@ """Example code for communicating with Sumo""" -import pytest -import yaml -from time import sleep -import sys import os +import sys import uuid +from time import sleep + +import pytest +import yaml sys.path.append(os.path.abspath(os.path.join("src"))) from sumo.wrapper import SumoClient # noqa: E402 -class Connection: - def __init__(self, token): - self.api = SumoClient(env="dev", token=token) - - -def _upload_parent_object(C, json): - response = C.api.post("/objects", json=json) +def _upload_parent_object(conn, json): + response = conn.post("/objects", json=json) if not 200 <= response.status_code < 202: raise Exception(f"code: {response.status_code}, text: {response.text}") return response -def _upload_blob(C, blob, url=None, object_id=None): - response = C.api.blob_client.upload_blob(blob=blob, url=url) +def _upload_blob(conn, blob, url=None, object_id=None): + response = conn.blob_client.upload_blob(blob=blob, url=url) print("Blob save " + str(response.status_code), flush=True) if not 200 <= response.status_code < 202: @@ -37,8 +33,8 @@ def _upload_blob(C, blob, url=None, object_id=None): return response -def _get_blob_uri(C, object_id): - 
response = C.api.get(f"/objects('{object_id}')/blob/authuri") +def _get_blob_uri(conn, object_id): + response = conn.get(f"/objects('{object_id}')/blob/authuri") print("Blob save " + str(response.status_code), flush=True) if not 200 <= response.status_code < 202: @@ -49,14 +45,14 @@ def _get_blob_uri(C, object_id): return response -def _download_object(C, object_id): - json = C.api.get(f"/objects('{object_id}')").json() +def _download_object(conn, object_id): + json = conn.get(f"/objects('{object_id}')").json() return json -def _upload_child_level_json(C, parent_id, json): - response = C.api.post(f"/objects('{parent_id}')", json=json) +def _upload_child_level_json(conn, parent_id, json): + response = conn.post(f"/objects('{parent_id}')", json=json) if not 200 <= response.status_code < 202: raise Exception( @@ -65,8 +61,8 @@ def _upload_child_level_json(C, parent_id, json): return response -def _delete_object(C, object_id): - response = C.api.delete(f"/objects('{object_id}')").json() +def _delete_object(conn, object_id): + response = conn.delete(f"/objects('{object_id}')").json() return response @@ -88,8 +84,8 @@ def test_upload_search_delete_ensemble_child(token): those objects to make sure they are available to the user. We then delete them and repeat the search to check if they were properly removed from sumo. 
""" - C = Connection(token) - B = b"123456789" + sumo_client = SumoClient(env="dev", token=token) + blob = b"123456789" # Upload Ensemble with open("tests/testdata/case.yml", "r") as stream: @@ -98,7 +94,9 @@ def test_upload_search_delete_ensemble_child(token): case_uuid = str(uuid.uuid4()) fmu_case_metadata["fmu"]["case"]["uuid"] = case_uuid - response_case = _upload_parent_object(C=C, json=fmu_case_metadata) + response_case = _upload_parent_object( + conn=sumo_client, json=fmu_case_metadata + ) assert 200 <= response_case.status_code <= 202 assert isinstance(response_case.json(), dict) @@ -115,7 +113,7 @@ def test_upload_search_delete_ensemble_child(token): fmu_surface_metadata["fmu"]["case"]["uuid"] = case_uuid response_surface = _upload_child_level_json( - C=C, parent_id=case_id, json=fmu_surface_metadata + conn=sumo_client, parent_id=case_id, json=fmu_surface_metadata ) assert 200 <= response_surface.status_code <= 202 @@ -126,7 +124,7 @@ def test_upload_search_delete_ensemble_child(token): # Upload BLOB response_blob = _upload_blob( - C=C, blob=B, url=blob_url, object_id=surface_id + conn=sumo_client, blob=blob, url=blob_url, object_id=surface_id ) assert 200 <= response_blob.status_code <= 202 @@ -135,7 +133,7 @@ def test_upload_search_delete_ensemble_child(token): # Search for ensemble query = f"fmu.case.uuid:{case_uuid}" - search_results = C.api.get( + search_results = sumo_client.get( "/searchroot", params={"$query": query, "$select": ["_source"]} ).json() @@ -144,28 +142,28 @@ def test_upload_search_delete_ensemble_child(token): assert hits[0].get("_id") == case_id # Search for child object - search_results = C.api.get( + search_results = sumo_client.get( "/search", {"$query": query, "$select": ["_source"]} ).json() total = search_results.get("hits").get("total").get("value") assert total == 2 - get_result = _download_object(C, object_id=surface_id) + get_result = _download_object(sumo_client, object_id=surface_id) assert get_result["_id"] == surface_id 
# Search for blob - bin_obj = C.api.get(f"/objects('{surface_id}')/blob").content - assert bin_obj == B + bin_obj = sumo_client.get(f"/objects('{surface_id}')/blob").content + assert bin_obj == blob # Delete Ensemble - result = _delete_object(C=C, object_id=case_id) + result = _delete_object(conn=sumo_client, object_id=case_id) assert result == "Accepted" sleep(40) # Search for ensemble - search_results = C.api.get( + search_results = sumo_client.get( "/searchroot", {"$query": query, "$select": ["_source"]} ).json() @@ -174,7 +172,7 @@ def test_upload_search_delete_ensemble_child(token): assert len(hits) == 0 # Search for child object - search_results = C.api.get( + search_results = sumo_client.get( "/search", {"$query": query, "$select": ["_source"]} ).json() total = search_results.get("hits").get("total").get("value") @@ -185,16 +183,18 @@ def test_fail_on_wrong_metadata(token): """ Upload a parent object with erroneous metadata, confirm failure """ - C = Connection(token) + conn = SumoClient(env="dev", token=token) with pytest.raises(Exception): - assert _upload_parent_object(C=C, json={"some field": "some value"}) + assert _upload_parent_object( + conn=conn, json={"some field": "some value"} + ) def test_upload_duplicate_ensemble(token): """ Adding a duplicate ensemble, both tries must return same id. 
""" - C = Connection(token) + conn = SumoClient(env="dev", token=token) with open("tests/testdata/case.yml", "r") as stream: fmu_metadata1 = yaml.safe_load(stream) @@ -207,23 +207,23 @@ def test_upload_duplicate_ensemble(token): fmu_metadata2["fmu"]["case"]["uuid"] = case_uuid # upload case metadata, get object_id - response1 = _upload_parent_object(C=C, json=fmu_metadata1) + response1 = _upload_parent_object(conn=conn, json=fmu_metadata1) assert 200 <= response1.status_code <= 202 # upload duplicated case metadata, get object_id - response2 = _upload_parent_object(C=C, json=fmu_metadata2) + response2 = _upload_parent_object(conn=conn, json=fmu_metadata2) assert 200 <= response2.status_code <= 202 case_id1 = response1.json().get("objectid") case_id2 = response2.json().get("objectid") assert case_id1 == case_id2 - get_result = _download_object(C, object_id=case_id1) + get_result = _download_object(conn, object_id=case_id1) assert get_result["_id"] == case_id1 # Delete Ensemble sleep(5) - result = _delete_object(C=C, object_id=case_id1) + result = _delete_object(conn=conn, object_id=case_id1) assert result == "Accepted" # Ugly: sumo-core has a cache for case objects, which has a @@ -236,4 +236,4 @@ def test_upload_duplicate_ensemble(token): # Search for ensemble with pytest.raises(Exception): - assert _download_object(C, object_id=case_id2) + assert _download_object(conn, object_id=case_id2)