diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 62a9b0dd..48f06ac9 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -32,6 +32,7 @@ jobs:
         uses: actions/setup-python@v4
         with:
           python-version: ${{ matrix.python-version }}
+          cache: pip

       - id: dependencies
         run: |
@@ -69,6 +70,7 @@ jobs:
         uses: actions/setup-python@v4
         with:
           python-version: ${{ matrix.python-version }}
+          cache: pip

       - id: dependencies
         run: |
diff --git a/rohmu/__init__.py b/rohmu/__init__.py
index 7064802f..172a07b0 100644
--- a/rohmu/__init__.py
+++ b/rohmu/__init__.py
@@ -11,12 +11,52 @@
 from .factory import (
     Config,
     get_class_for_notifier,
+    get_class_for_storage_driver,
     get_class_for_transfer,
     get_notifier,
     get_transfer,
+    get_transfer_from_model,
     get_transfer_model,
     NOTIFIER_TYPE,
     STORAGE_TYPE,
 )
 from .notifier.interface import Notifier
 from rohmu.object_storage.base import BaseTransfer
+from rohmu.object_storage.config import (
+    AzureObjectStorageConfig,
+    GoogleObjectStorageConfig,
+    LocalObjectStorageConfig,
+    ProxyInfo,
+    S3AddressingStyle,
+    S3ObjectStorageConfig,
+    SFTPObjectStorageConfig,
+    StorageDriver,
+    SwiftObjectStorageConfig,
+)
+
+__all__ = [
+    "AzureObjectStorageConfig",
+    "BaseTransfer",
+    "Config",
+    "get_class_for_notifier",
+    "get_class_for_storage_driver",
+    "get_class_for_transfer",
+    "get_notifier",
+    "get_transfer_from_model",
+    "get_transfer_model",
+    "get_transfer",
+    "GoogleObjectStorageConfig",
+    "InvalidConfigurationError",
+    "IO_BLOCK_SIZE",
+    "LocalObjectStorageConfig",
+    "NOTIFIER_TYPE",
+    "Notifier",
+    "ProxyInfo",
+    "S3AddressingStyle",
+    "S3ObjectStorageConfig",
+    "SFTPObjectStorageConfig",
+    "STORAGE_TYPE",
+    "StorageDriver",
+    "StorageModel",
+    "SwiftObjectStorageConfig",
+]
diff --git a/rohmu/common/models.py b/rohmu/common/models.py
index 2586b7a9..c6d340cd 100644
--- a/rohmu/common/models.py
+++ b/rohmu/common/models.py
@@ -1,7 +1,6 @@
 # Copyright (c) 2023 Aiven, Helsinki, Finland. https://aiven.io/
 from rohmu.common.statsd import StatsdConfig
 from rohmu.common.strenum import StrEnum
-from rohmu.notifier.interface import Notifier
 from typing import Optional

 import enum
@@ -73,7 +72,6 @@ class ProxyInfo(RohmuModel):

 class StorageModel(pydantic.BaseModel):
     storage_type: StorageDriver
-    notifier: Optional[Notifier] = None
     statsd_info: Optional[StatsdConfig] = None

     class Config:
diff --git a/rohmu/common/strenum.py b/rohmu/common/strenum.py
index 4ad7f72a..ed8da07b 100644
--- a/rohmu/common/strenum.py
+++ b/rohmu/common/strenum.py
@@ -6,6 +6,9 @@
 """
 from __future__ import annotations

+from typing import Optional
+from typing_extensions import Self
+
 import enum


@@ -14,7 +17,7 @@ def __str__(self) -> str:
         return str(self.value)

     @classmethod
-    def of(cls, value: str) -> StrEnum | None:
+    def of(cls, value: str) -> Optional[Self]:
         try:
             return cls(value)
         except ValueError:
diff --git a/rohmu/factory.py b/rohmu/factory.py
index 1bc91732..4b119c65 100644
--- a/rohmu/factory.py
+++ b/rohmu/factory.py
@@ -1,44 +1,48 @@
 # Copyright (c) 2023 Aiven, Helsinki, Finland. https://aiven.io/
-from .common.models import StorageModel
+from .common.models import StorageDriver, StorageModel
 from .errors import InvalidConfigurationError
 from .notifier.interface import Notifier
 from rohmu.object_storage.base import BaseTransfer
-from typing import Any, cast, Dict, Type
+from rohmu.object_storage.config import StorageModelT
+from typing import Any, Dict, Optional, Type

 STORAGE_TYPE = "storage_type"
 NOTIFIER_TYPE = "notifier_type"
 Config = Dict[str, Any]


-def get_class_for_transfer(obj_store: Config) -> Type[BaseTransfer[StorageModel]]:
-    storage_type = obj_store[STORAGE_TYPE]
-    if storage_type == "azure":
+def get_class_for_transfer(obj_store: Config) -> Type[BaseTransfer[Any]]:
+    return get_class_for_storage_driver(StorageDriver(obj_store[STORAGE_TYPE]))
+
+
+def get_class_for_storage_driver(storage_driver: StorageDriver) -> Type[BaseTransfer[Any]]:
+    if storage_driver == StorageDriver.azure:
         from rohmu.object_storage.azure import AzureTransfer

-        return cast(Type[BaseTransfer[StorageModel]], AzureTransfer)
-    elif storage_type == "google":
+        return AzureTransfer
+    elif storage_driver == StorageDriver.google:
         from rohmu.object_storage.google import GoogleTransfer

-        return cast(Type[BaseTransfer[StorageModel]], GoogleTransfer)
-    elif storage_type == "sftp":
+        return GoogleTransfer
+    elif storage_driver == StorageDriver.sftp:
         from rohmu.object_storage.sftp import SFTPTransfer

-        return cast(Type[BaseTransfer[StorageModel]], SFTPTransfer)
-    elif storage_type == "local":
+        return SFTPTransfer
+    elif storage_driver == StorageDriver.local:
         from rohmu.object_storage.local import LocalTransfer

-        return cast(Type[BaseTransfer[StorageModel]], LocalTransfer)
-    elif storage_type == "s3":
+        return LocalTransfer
+    elif storage_driver == StorageDriver.s3:
         from rohmu.object_storage.s3 import S3Transfer

-        return cast(Type[BaseTransfer[StorageModel]], S3Transfer)
-    elif storage_type == "swift":
+        return S3Transfer
+    elif storage_driver == StorageDriver.swift:
         from rohmu.object_storage.swift import SwiftTransfer

-        return cast(Type[BaseTransfer[StorageModel]], SwiftTransfer)
+        return SwiftTransfer

-    raise InvalidConfigurationError(f"unsupported storage type {repr(storage_type)}")
+    raise InvalidConfigurationError(f"unsupported storage type {storage_driver!r}")


 def get_class_for_notifier(notifier_config: Config) -> Type[Notifier]:
@@ -59,18 +63,17 @@ def get_notifier(notifier_config: Config) -> Notifier:

 def get_transfer_model(storage_config: Config) -> StorageModel:
     storage_class = get_class_for_transfer(storage_config)
-    storage_config = dict(storage_config)
-    storage_config.pop(STORAGE_TYPE)
-    notifier_config = storage_config.pop("notifier", None)
-    notifier = None
-    if notifier_config is not None:
-        notifier = get_notifier(notifier_config)
-
-    model = storage_class.config_model(**storage_config, notifier=notifier)
-    return model
+    return storage_class.config_model(**storage_config)


-def get_transfer(storage_config: Config) -> BaseTransfer[StorageModel]:
-    storage_class = get_class_for_transfer(storage_config)
+def get_transfer(storage_config: Config) -> BaseTransfer[Any]:
+    storage_config = storage_config.copy()
+    notifier_config = storage_config.pop("notifier", None)
+    notifier = get_notifier(notifier_config) if notifier_config is not None else None
     model = get_transfer_model(storage_config)
-    return storage_class.from_model(model)
+    return get_transfer_from_model(model, notifier)
+
+
+def get_transfer_from_model(model: StorageModelT, notifier: Optional[Notifier] = None) -> BaseTransfer[StorageModelT]:
+    storage_class = get_class_for_storage_driver(model.storage_type)
+    return storage_class.from_model(model, notifier)
diff --git a/rohmu/object_storage/base.py b/rohmu/object_storage/base.py
index 9e9a13ce..33e39907 100644
--- a/rohmu/object_storage/base.py
+++ b/rohmu/object_storage/base.py
@@ -32,6 +32,7 @@
     TypeVar,
     Union,
 )
+from typing_extensions import Self

 import logging
 import os
@@ -130,8 +131,8 @@ def _should_multipart(
         return int(size) > chunk_size

     @classmethod
-    def from_model(cls, model: StorageModelT) -> BaseTransfer[StorageModelT]:
-        return cls(**model.dict(by_alias=True, exclude={"storage_type"}))
+    def from_model(cls, model: StorageModelT, notifier: Optional[Notifier] = None) -> Self:
+        return cls(**model.dict(by_alias=True, exclude={"storage_type"}), notifier=notifier)

     def copy_file(
         self, *, source_key: str, destination_key: str, metadata: Optional[Metadata] = None, **_kwargs: Any
diff --git a/rohmu/object_storage/s3.py b/rohmu/object_storage/s3.py
index 6daba0cf..8e09207c 100644
--- a/rohmu/object_storage/s3.py
+++ b/rohmu/object_storage/s3.py
@@ -59,7 +59,7 @@ def create_s3_client(
     aws_secret_access_key: Optional[str],
     aws_session_token: Optional[str],
     region_name: str,
-    verify: Optional[bool | str] = None,
+    verify: Optional[Union[bool, str]] = None,
     endpoint_url: Optional[str] = None,
 ) -> S3Client:
     s3_client = session.create_client(
diff --git a/rohmu/transfer_pool.py b/rohmu/transfer_pool.py
index 0292f2b9..41aa1b60 100644
--- a/rohmu/transfer_pool.py
+++ b/rohmu/transfer_pool.py
@@ -11,8 +11,10 @@
 from .factory import get_transfer as rohmu_get_transfer
 from contextlib import contextmanager
 from rohmu.common.models import StorageModel
+from rohmu.notifier.interface import Notifier
 from rohmu.object_storage.base import BaseTransfer
 from typing import Any, Callable, Generator, Optional
+from typing_extensions import Self

 import heapq
 import json
@@ -137,7 +139,7 @@ def __getattribute__(self, attr: str) -> Any:
         return super().__getattribute__(attr)

     @classmethod
-    def from_model(cls, model: StorageModel) -> BaseTransfer[StorageModel]:
+    def from_model(cls, model: StorageModel, notifier: Optional[Notifier] = None) -> Self:
         raise InvalidTransferError("You should not call class methods on SafeTransfer instances")

     def return_to_pool(self) -> None:
diff --git a/setup.cfg b/setup.cfg
index 96744eb8..3d91a225 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -23,6 +23,7 @@ platforms =
 package_dir =
     =.
 packages = find:
+python_requires = >=3.8
 install_requires =
     azure-storage-blob >= 2.1.0
     botocore
diff --git a/test/common/test_statsd.py b/test/common/test_statsd.py
index 2efec217..7a80dd85 100644
--- a/test/common/test_statsd.py
+++ b/test/common/test_statsd.py
@@ -10,7 +10,7 @@
 from __future__ import annotations

 from rohmu.common import statsd
-from typing import Any
+from typing import Any, Union

 import asyncio
 import pytest
@@ -21,7 +21,7 @@ class _Protocol(asyncio.DatagramProtocol):
     def __init__(self, queue: asyncio.Queue[bytes]):
         self.received_queue = queue

-    def datagram_received(self, data: bytes, addr: tuple[str | Any, int]) -> None:
+    def datagram_received(self, data: bytes, addr: tuple[Union[str, Any], int]) -> None:
         self.received_queue.put_nowait(data)
diff --git a/test/object_storage/test_s3.py b/test/object_storage/test_s3.py
index 38dc56ec..f09f31ce 100644
--- a/test/object_storage/test_s3.py
+++ b/test/object_storage/test_s3.py
@@ -13,7 +13,7 @@
 from rohmu.object_storage.config import S3ObjectStorageConfig
 from rohmu.object_storage.s3 import S3Transfer
 from tempfile import NamedTemporaryFile
-from typing import Any, BinaryIO, Callable, Iterator, Optional
+from typing import Any, BinaryIO, Callable, Iterator, Optional, Union
 from unittest.mock import ANY, call, MagicMock, patch

 import pytest
@@ -240,7 +240,7 @@ def test_validate_is_verify_tls_and_cert_path() -> None:
         (False, None, False),
     ],
 )
-def test_cert_path(is_verify_tls: bool, cert_path: Path | None, expected: str | bool) -> None:
+def test_cert_path(is_verify_tls: bool, cert_path: Optional[Path], expected: Union[str, bool]) -> None:
     with patch.object(rohmu.object_storage.s3, "create_s3_client") as mock:
         S3Transfer(
             region="test-region",
diff --git a/test/test_factory.py b/test/test_factory.py
index 32b181fc..a59fc643 100644
--- a/test/test_factory.py
+++ b/test/test_factory.py
@@ -1,7 +1,6 @@
-from rohmu.factory import Config, get_class_for_transfer, get_transfer
+from rohmu.factory import Config, get_class_for_transfer, get_transfer, get_transfer_from_model
 from rohmu.object_storage.config import S3ObjectStorageConfig
 from rohmu.object_storage.s3 import S3Transfer
-from typing import cast
 from unittest.mock import ANY, MagicMock, Mock, patch

 import pytest
@@ -42,18 +41,17 @@ def test_get_transfer_s3(
     config: Config,
 ) -> None:
     expected_config_arg = dict(config)
-    expected_config_arg.pop("storage_type")
     expected_config_arg.pop("notifier")
     expected_botocore_config = {"proxies": {"https": "socks5://bob:secret@proxy.test:16666"}}
-    mock_config_model.return_value = S3ObjectStorageConfig(**expected_config_arg, notifier=None)
+    mock_config_model.return_value = S3ObjectStorageConfig(**expected_config_arg)
     transfer_object = get_transfer(config)
-    mock_config_model.assert_called_once_with(**expected_config_arg, notifier=mock_notifier())
-    mock_from_model.assert_called_once_with(mock_config_model())
+    mock_config_model.assert_called_once_with(**expected_config_arg)
+    mock_from_model.assert_called_once_with(mock_config_model(), mock_notifier.return_value)
+    mock_notifier.assert_called_once_with(url=config["notifier"]["url"])
     assert isinstance(transfer_object, S3Transfer)
-    # cast was the easiest way to convince mypy
-    assert cast(S3Transfer, transfer_object).bucket_name == "dummy-bucket"
+    assert transfer_object.bucket_name == "dummy-bucket"
     mock_botocore_config.assert_called_once_with(**expected_botocore_config)
     mock_s3_client.assert_called_once_with(
         session=ANY,
@@ -70,3 +68,29 @@
 @patch.dict(sys.modules, {"swiftclient": MagicMock(), "azure.common": MagicMock()})
 def test_config_model_defined(storage_type: str) -> None:
     assert get_class_for_transfer({"storage_type": storage_type}).config_model
+
+
+@patch("rohmu.object_storage.s3.create_s3_client")
+def test_get_transfer_from_model(
+    create_s3_client: Mock,
+) -> None:
+    config = S3ObjectStorageConfig(
+        region="dummy-region",
+        bucket_name="dummy-bucket",
+        proxy_info={
+            "host": "proxy.test",
+            "port": "16666",
+            "type": "socks5",
+            "user": "bob",
+            "pass": "secret",
+        },
+    )
+    get_transfer_from_model(config)
+    create_s3_client.assert_called_once_with(
+        session=ANY,
+        config=ANY,
+        aws_access_key_id=None,
+        aws_secret_access_key=None,
+        aws_session_token=None,
+        region_name="dummy-region",
+    )
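
Reviewer note (not part of the patch): a minimal usage sketch of the new typed entry point added in rohmu/factory.py and re-exported from rohmu/__init__.py. The region and bucket name below are illustrative placeholders, not values taken from this change.

    # Sketch only: build a typed config and hand it directly to the new
    # get_transfer_from_model(), bypassing the dict-based get_transfer() path.
    # The notifier argument is optional.
    from rohmu import S3ObjectStorageConfig, get_transfer_from_model

    config = S3ObjectStorageConfig(region="eu-north-1", bucket_name="example-bucket")  # placeholder values
    transfer = get_transfer_from_model(config)  # resolves S3Transfer via get_class_for_storage_driver()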