diff --git a/src/prefecto/testing/s3.py b/src/prefecto/testing/s3.py
index 23c1adc..e8b6e75 100644
--- a/src/prefecto/testing/s3.py
+++ b/src/prefecto/testing/s3.py
@@ -29,6 +29,7 @@ def mock_bucket(
     keys: list[str] | None = None,
     processes: int = 3,
     chunksize: int = 5,
+    activate_moto: bool = True,
 ) -> Generator[Any, None, None]:
     """Creates a mock S3 bucket with `moto`. If given an export path, the mock bucket
     will export its contents during teardown.
@@ -39,6 +40,7 @@ def mock_bucket(
         keys (list[str] | None, optional): The keys to export. If None, all keys will be exported.
         processes (int, optional): The number of threads to use for exporting. Defaults to 3.
         chunksize (int, optional): The chunksize to use for exporting. Defaults to 5.
+        activate_moto (bool, optional): Whether to activate the moto mock environment. Defaults to True.
 
     Yields:
         The mocked bucket.
@@ -75,32 +77,67 @@ def mock_bucket(
                 └── subfolder/
                     └── test-key-2.txt
     ```
+
+    You can deactivate moto's mock environment by setting `activate_moto=False`. This is useful
+    for nesting mock buckets or mocking multiple buckets in the same context.
+
+    ```python
+    from prefecto.testing.s3 import mock_bucket
+
+    with mock_bucket("my-bucket", export_path="path/to/export/dir") as my_bucket:
+        my_bucket.put_object(Key="test-key-1.txt", Body=b"test value 1")
+        with mock_bucket("my-bucket-2", export_path="path/to/export/dir", activate_moto=False) as my_bucket_2:
+            my_bucket_2.put_object(Key="test-key-2.txt", Body=b"test value 2")
+    ```
+    ```text
+    path/
+    └── to/
+        └── export/
+            └── dir/
+                ├── my-bucket/
+                |   └── test-key-1.txt
+                └── my-bucket-2/
+                    └── test-key-2.txt
+    ```
     """
-    with mock_s3():
-        try:
-            s3 = boto3.resource("s3")
-            bucket = s3.Bucket(bucket_name)
-            bucket.create()
-            yield bucket
-        finally:
-            if export_path is not None:
-
-                if keys is None:
-                    objects = bucket.objects.all()
-                else:
-                    objects = (bucket.Object(key) for key in keys)
-
-                # Resolve the export path and create the bucket directory
-                bucket_path = Path(export_path) / bucket.name
-                bucket_path.mkdir(parents=True, exist_ok=True)
-
-                if processes > 1:
-                    # Export the objects
-                    with ThreadPool(processes) as pool:
-                        pool.starmap(
-                            _export,
-                            zip(objects, repeat(bucket_path)),
-                            chunksize=chunksize,
-                        )
-                else:
-                    map(_export, objects, repeat(bucket_path))
+    # Enter the moto mock environment manually so nested mock buckets can
+    # opt out and share the already-active environment.
+    mock_env = None
+    if activate_moto:
+        mock_env = mock_s3()
+        mock_env.__enter__()
+    try:
+        s3 = boto3.resource("s3")
+        bucket = s3.Bucket(bucket_name)
+        bucket.create()
+        yield bucket
+    finally:
+        if export_path is not None:
+
+            if keys is None:
+                objects = bucket.objects.all()
+            else:
+                objects = (bucket.Object(key) for key in keys)
+
+            # Resolve the export path and create the bucket directory
+            bucket_path = Path(export_path) / bucket.name
+            bucket_path.mkdir(parents=True, exist_ok=True)
+
+            if processes > 1:
+                # Export the objects
+                with ThreadPool(processes) as pool:
+                    pool.starmap(
+                        _export,
+                        zip(objects, repeat(bucket_path)),
+                        chunksize=chunksize,
+                    )
+            else:
+                # NOTE(review): a bare map() is lazy and was never consumed,
+                # so the single-process path exported nothing; iterate eagerly.
+                for obj in objects:
+                    _export(obj, bucket_path)
+
+        # Tear down moto inside finally (after the export, which needs the mock
+        # env active) so it is released even if the caller's body raises; pass
+        # the standard (exc_type, exc_val, exc_tb) triple expected by __exit__.
+        if activate_moto and mock_env is not None:
+            mock_env.__exit__(None, None, None)
diff --git a/tests/testing/test_s3.py b/tests/testing/test_s3.py
index 453e81c..bf7bd8b 100644
--- a/tests/testing/test_s3.py
+++ b/tests/testing/test_s3.py
@@ -81,3 +81,19 @@ def test_mock_bucket_export(
     for path in unexpected_paths:
         path = tmpdir / path
         assert not path.is_file()
+
+
+def test_nested_mock_bucket_export():
+    """Tests the `mock_bucket` export behavior when nested."""
+    with tempfile.TemporaryDirectory() as tmpdir:
+        tmpdir = Path(tmpdir)
+
+        with mock_bucket("test-bucket", export_path=tmpdir) as bucket:
+            bucket.put_object(Key="test-key-1", Body=b"test value")
+            with mock_bucket(
+                "test-bucket-2", export_path=tmpdir, activate_moto=False
+            ) as bucket_2:
+                bucket_2.put_object(Key="test-key-2", Body=b"test value")
+
+        assert (tmpdir / "test-bucket/test-key-1").is_file()
+        assert (tmpdir / "test-bucket-2/test-key-2").is_file()